Franck Nijhof 2025-04-02 18:47:40 +02:00 committed by GitHub
commit e8b2a3de8b
1770 changed files with 81545 additions and 20967 deletions

View File

@ -32,7 +32,7 @@ jobs:
fetch-depth: 0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.4.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
@ -69,7 +69,7 @@ jobs:
run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -
- name: Upload translations
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.6.2
with:
name: translations
path: translations.tar.gz
@ -94,7 +94,7 @@ jobs:
- name: Download nightly wheels of frontend
if: needs.init.outputs.channel == 'dev'
uses: dawidd6/action-download-artifact@v8
uses: dawidd6/action-download-artifact@v9
with:
github_token: ${{secrets.GITHUB_TOKEN}}
repo: home-assistant/frontend
@ -105,7 +105,7 @@ jobs:
- name: Download nightly wheels of intents
if: needs.init.outputs.channel == 'dev'
uses: dawidd6/action-download-artifact@v8
uses: dawidd6/action-download-artifact@v9
with:
github_token: ${{secrets.GITHUB_TOKEN}}
repo: home-assistant/intents-package
@ -116,7 +116,7 @@ jobs:
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
if: needs.init.outputs.channel == 'dev'
uses: actions/setup-python@v5.4.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
@ -175,7 +175,7 @@ jobs:
sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt
- name: Download translations
uses: actions/download-artifact@v4.1.9
uses: actions/download-artifact@v4.2.1
with:
name: translations
@ -190,14 +190,14 @@ jobs:
echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE
- name: Login to GitHub Container Registry
uses: docker/login-action@v3.3.0
uses: docker/login-action@v3.4.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build base image
uses: home-assistant/builder@2025.02.0
uses: home-assistant/builder@2025.03.0
with:
args: |
$BUILD_ARGS \
@ -256,14 +256,14 @@ jobs:
fi
- name: Login to GitHub Container Registry
uses: docker/login-action@v3.3.0
uses: docker/login-action@v3.4.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build base image
uses: home-assistant/builder@2025.02.0
uses: home-assistant/builder@2025.03.0
with:
args: |
$BUILD_ARGS \
@ -330,14 +330,14 @@ jobs:
- name: Login to DockerHub
if: matrix.registry == 'docker.io/homeassistant'
uses: docker/login-action@v3.3.0
uses: docker/login-action@v3.4.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry
if: matrix.registry == 'ghcr.io/home-assistant'
uses: docker/login-action@v3.3.0
uses: docker/login-action@v3.4.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@ -457,12 +457,12 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.4.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Download translations
uses: actions/download-artifact@v4.1.9
uses: actions/download-artifact@v4.2.1
with:
name: translations
@ -502,14 +502,14 @@ jobs:
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Login to GitHub Container Registry
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker image
uses: docker/build-push-action@0adf9959216b96bec444f325f1e493d4aa344497 # v6.14.0
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile
@ -522,7 +522,7 @@ jobs:
- name: Push Docker image
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
id: push
uses: docker/build-push-action@0adf9959216b96bec444f325f1e493d4aa344497 # v6.14.0
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile
@ -531,7 +531,7 @@ jobs:
- name: Generate artifact attestation
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
uses: actions/attest-build-provenance@520d128f165991a6c774bcb264f323e3d70747f4 # v2.2.0
uses: actions/attest-build-provenance@c074443f1aee8d4aeeae555aebba3282517141b2 # v2.2.3
with:
subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
subject-digest: ${{ steps.push.outputs.digest }}

View File

@ -37,10 +37,10 @@ on:
type: boolean
env:
CACHE_VERSION: 11
CACHE_VERSION: 12
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 9
HA_SHORT_VERSION: "2025.3"
HA_SHORT_VERSION: "2025.4"
DEFAULT_PYTHON: "3.13"
ALL_PYTHON_VERSIONS: "['3.13']"
# 10.3 is the oldest supported version
@ -89,6 +89,7 @@ jobs:
test_groups: ${{ steps.info.outputs.test_groups }}
tests_glob: ${{ steps.info.outputs.tests_glob }}
tests: ${{ steps.info.outputs.tests }}
lint_only: ${{ steps.info.outputs.lint_only }}
skip_coverage: ${{ steps.info.outputs.skip_coverage }}
runs-on: ubuntu-24.04
steps:
@ -142,6 +143,7 @@ jobs:
test_group_count=10
tests="[]"
tests_glob=""
lint_only=""
skip_coverage=""
if [[ "${{ steps.integrations.outputs.changes }}" != "[]" ]];
@ -192,6 +194,17 @@ jobs:
test_full_suite="true"
fi
if [[ "${{ github.event.inputs.lint-only }}" == "true" ]] \
|| [[ "${{ github.event.inputs.pylint-only }}" == "true" ]] \
|| [[ "${{ github.event.inputs.mypy-only }}" == "true" ]] \
|| [[ "${{ github.event.inputs.audit-licenses-only }}" == "true" ]] \
|| [[ "${{ github.event_name }}" == "push" \
&& "${{ github.event.repository.full_name }}" != "home-assistant/core" ]];
then
lint_only="true"
skip_coverage="true"
fi
if [[ "${{ github.event.inputs.skip-coverage }}" == "true" ]] \
|| [[ "${{ contains(github.event.pull_request.labels.*.name, 'ci-skip-coverage') }}" == "true" ]];
then
@ -217,6 +230,8 @@ jobs:
echo "tests=${tests}" >> $GITHUB_OUTPUT
echo "tests_glob: ${tests_glob}"
echo "tests_glob=${tests_glob}" >> $GITHUB_OUTPUT
echo "lint_only": ${lint_only}
echo "lint_only=${lint_only}" >> $GITHUB_OUTPUT
echo "skip_coverage: ${skip_coverage}"
echo "skip_coverage=${skip_coverage}" >> $GITHUB_OUTPUT
@ -234,13 +249,13 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.4.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.1
uses: actions/cache@v4.2.3
with:
path: venv
key: >-
@ -256,7 +271,7 @@ jobs:
uv pip install "$(cat requirements_test.txt | grep pre-commit)"
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.2.1
uses: actions/cache@v4.2.3
with:
path: ${{ env.PRE_COMMIT_CACHE }}
lookup-only: true
@ -279,14 +294,14 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.4.0
uses: actions/setup-python@v5.5.0
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.3
with:
path: venv
fail-on-cache-miss: true
@ -295,7 +310,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.3
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@ -319,14 +334,14 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.4.0
uses: actions/setup-python@v5.5.0
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.3
with:
path: venv
fail-on-cache-miss: true
@ -335,7 +350,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.3
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@ -359,14 +374,14 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.4.0
uses: actions/setup-python@v5.5.0
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.3
with:
path: venv
fail-on-cache-miss: true
@ -375,7 +390,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.3
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@ -469,7 +484,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.4.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ matrix.python-version }}
check-latest: true
@ -482,7 +497,7 @@ jobs:
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.1
uses: actions/cache@v4.2.3
with:
path: venv
key: >-
@ -490,7 +505,7 @@ jobs:
needs.info.outputs.python_cache_key }}
- name: Restore uv wheel cache
if: steps.cache-venv.outputs.cache-hit != 'true'
uses: actions/cache@v4.2.1
uses: actions/cache@v4.2.3
with:
path: ${{ env.UV_CACHE_DIR }}
key: >-
@ -537,7 +552,7 @@ jobs:
python --version
uv pip freeze >> pip_freeze.txt
- name: Upload pip_freeze artifact
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.6.2
with:
name: pip-freeze-${{ matrix.python-version }}
path: pip_freeze.txt
@ -572,13 +587,13 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.4.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.3
with:
path: venv
fail-on-cache-miss: true
@ -605,13 +620,13 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.4.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.3
with:
path: venv
fail-on-cache-miss: true
@ -623,6 +638,25 @@ jobs:
. venv/bin/activate
python -m script.gen_requirements_all validate
dependency-review:
name: Dependency review
runs-on: ubuntu-24.04
needs:
- info
- base
if: |
github.event.inputs.pylint-only != 'true'
&& github.event.inputs.mypy-only != 'true'
&& needs.info.outputs.requirements == 'true'
&& github.event_name == 'pull_request'
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Dependency review
uses: actions/dependency-review-action@v4.5.0
with:
license-check: false # We use our own license audit checks
audit-licenses:
name: Audit licenses
runs-on: ubuntu-24.04
@ -643,13 +677,13 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.4.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ matrix.python-version }}
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.3
with:
path: venv
fail-on-cache-miss: true
@ -661,7 +695,7 @@ jobs:
. venv/bin/activate
python -m script.licenses extract --output-file=licenses-${{ matrix.python-version }}.json
- name: Upload licenses
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.6.2
with:
name: licenses-${{ github.run_number }}-${{ matrix.python-version }}
path: licenses-${{ matrix.python-version }}.json
@ -686,13 +720,13 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.4.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.3
with:
path: venv
fail-on-cache-miss: true
@ -733,13 +767,13 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.4.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.3
with:
path: venv
fail-on-cache-miss: true
@ -778,7 +812,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.4.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
@ -791,7 +825,7 @@ jobs:
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.3
with:
path: venv
fail-on-cache-miss: true
@ -799,7 +833,7 @@ jobs:
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Restore mypy cache
uses: actions/cache@v4.2.1
uses: actions/cache@v4.2.3
with:
path: .mypy_cache
key: >-
@ -829,11 +863,7 @@ jobs:
prepare-pytest-full:
runs-on: ubuntu-24.04
if: |
(github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
&& github.event.inputs.lint-only != 'true'
&& github.event.inputs.pylint-only != 'true'
&& github.event.inputs.mypy-only != 'true'
&& github.event.inputs.audit-licenses-only != 'true'
needs.info.outputs.lint_only != 'true'
&& needs.info.outputs.test_full_suite == 'true'
needs:
- info
@ -859,13 +889,13 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.4.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.3
with:
path: venv
fail-on-cache-miss: true
@ -877,7 +907,7 @@ jobs:
. venv/bin/activate
python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests
- name: Upload pytest_buckets
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.6.2
with:
name: pytest_buckets
path: pytest_buckets.txt
@ -886,11 +916,7 @@ jobs:
pytest-full:
runs-on: ubuntu-24.04
if: |
(github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
&& github.event.inputs.lint-only != 'true'
&& github.event.inputs.pylint-only != 'true'
&& github.event.inputs.mypy-only != 'true'
&& github.event.inputs.audit-licenses-only != 'true'
needs.info.outputs.lint_only != 'true'
&& needs.info.outputs.test_full_suite == 'true'
needs:
- info
@ -923,13 +949,13 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.4.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ matrix.python-version }}
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.3
with:
path: venv
fail-on-cache-miss: true
@ -942,7 +968,7 @@ jobs:
run: |
echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
- name: Download pytest_buckets
uses: actions/download-artifact@v4.1.9
uses: actions/download-artifact@v4.2.1
with:
name: pytest_buckets
- name: Compile English translations
@ -962,6 +988,7 @@ jobs:
if [[ "${{ needs.info.outputs.skip_coverage }}" != "true" ]]; then
cov_params+=(--cov="homeassistant")
cov_params+=(--cov-report=xml)
cov_params+=(--junitxml=junit.xml -o junit_family=legacy)
fi
echo "Test group ${{ matrix.group }}: $(sed -n "${{ matrix.group }},1p" pytest_buckets.txt)"
@ -980,18 +1007,24 @@ jobs:
2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
- name: Upload pytest output
if: success() || failure() && steps.pytest-full.conclusion == 'failure'
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.6.2
with:
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
path: pytest-*.txt
overwrite: true
- name: Upload coverage artifact
if: needs.info.outputs.skip_coverage != 'true'
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.6.2
with:
name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
path: coverage.xml
overwrite: true
- name: Upload test results artifact
if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
uses: actions/upload-artifact@v4.6.2
with:
name: test-results-full-${{ matrix.python-version }}-${{ matrix.group }}
path: junit.xml
- name: Remove pytest_buckets
run: rm pytest_buckets.txt
- name: Check dirty
@ -1009,11 +1042,7 @@ jobs:
MYSQL_ROOT_PASSWORD: password
options: --health-cmd="mysqladmin ping -uroot -ppassword" --health-interval=5s --health-timeout=2s --health-retries=3
if: |
(github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
&& github.event.inputs.lint-only != 'true'
&& github.event.inputs.pylint-only != 'true'
&& github.event.inputs.mypy-only != 'true'
&& github.event.inputs.audit-licenses-only != 'true'
needs.info.outputs.lint_only != 'true'
&& needs.info.outputs.mariadb_groups != '[]'
needs:
- info
@ -1045,13 +1074,13 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.4.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ matrix.python-version }}
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.3
with:
path: venv
fail-on-cache-miss: true
@ -1088,6 +1117,7 @@ jobs:
cov_params+=(--cov="homeassistant.components.recorder")
cov_params+=(--cov-report=xml)
cov_params+=(--cov-report=term-missing)
cov_params+=(--junitxml=junit.xml -o junit_family=legacy)
fi
python3 -b -X dev -m pytest \
@ -1108,7 +1138,7 @@ jobs:
2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt
- name: Upload pytest output
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.6.2
with:
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
steps.pytest-partial.outputs.mariadb }}
@ -1116,12 +1146,19 @@ jobs:
overwrite: true
- name: Upload coverage artifact
if: needs.info.outputs.skip_coverage != 'true'
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.6.2
with:
name: coverage-${{ matrix.python-version }}-${{
steps.pytest-partial.outputs.mariadb }}
path: coverage.xml
overwrite: true
- name: Upload test results artifact
if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
uses: actions/upload-artifact@v4.6.2
with:
name: test-results-mariadb-${{ matrix.python-version }}-${{
steps.pytest-partial.outputs.mariadb }}
path: junit.xml
- name: Check dirty
run: |
./script/check_dirty
@ -1137,11 +1174,7 @@ jobs:
POSTGRES_PASSWORD: password
options: --health-cmd="pg_isready -hlocalhost -Upostgres" --health-interval=5s --health-timeout=2s --health-retries=3
if: |
(github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
&& github.event.inputs.lint-only != 'true'
&& github.event.inputs.pylint-only != 'true'
&& github.event.inputs.mypy-only != 'true'
&& github.event.inputs.audit-licenses-only != 'true'
needs.info.outputs.lint_only != 'true'
&& needs.info.outputs.postgresql_groups != '[]'
needs:
- info
@ -1175,13 +1208,13 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.4.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ matrix.python-version }}
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.3
with:
path: venv
fail-on-cache-miss: true
@ -1218,6 +1251,7 @@ jobs:
cov_params+=(--cov="homeassistant.components.recorder")
cov_params+=(--cov-report=xml)
cov_params+=(--cov-report=term-missing)
cov_params+=(--junitxml=junit.xml -o junit_family=legacy)
fi
python3 -b -X dev -m pytest \
@ -1239,7 +1273,7 @@ jobs:
2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt
- name: Upload pytest output
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.6.2
with:
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
steps.pytest-partial.outputs.postgresql }}
@ -1247,12 +1281,19 @@ jobs:
overwrite: true
- name: Upload coverage artifact
if: needs.info.outputs.skip_coverage != 'true'
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.6.2
with:
name: coverage-${{ matrix.python-version }}-${{
steps.pytest-partial.outputs.postgresql }}
path: coverage.xml
overwrite: true
- name: Upload test results artifact
if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
uses: actions/upload-artifact@v4.6.2
with:
name: test-results-postgres-${{ matrix.python-version }}-${{
steps.pytest-partial.outputs.postgresql }}
path: junit.xml
- name: Check dirty
run: |
./script/check_dirty
@ -1271,12 +1312,12 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Download all coverage artifacts
uses: actions/download-artifact@v4.1.9
uses: actions/download-artifact@v4.2.1
with:
pattern: coverage-*
- name: Upload coverage to Codecov
if: needs.info.outputs.test_full_suite == 'true'
uses: codecov/codecov-action@v5.3.1
uses: codecov/codecov-action@v5.4.0
with:
fail_ci_if_error: true
flags: full-suite
@ -1285,11 +1326,7 @@ jobs:
pytest-partial:
runs-on: ubuntu-24.04
if: |
(github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
&& github.event.inputs.lint-only != 'true'
&& github.event.inputs.pylint-only != 'true'
&& github.event.inputs.mypy-only != 'true'
&& github.event.inputs.audit-licenses-only != 'true'
needs.info.outputs.lint_only != 'true'
&& needs.info.outputs.tests_glob
&& needs.info.outputs.test_full_suite == 'false'
needs:
@ -1322,13 +1359,13 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.4.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ matrix.python-version }}
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.3
with:
path: venv
fail-on-cache-miss: true
@ -1365,6 +1402,7 @@ jobs:
cov_params+=(--cov="homeassistant.components.${{ matrix.group }}")
cov_params+=(--cov-report=xml)
cov_params+=(--cov-report=term-missing)
cov_params+=(--junitxml=junit.xml -o junit_family=legacy)
fi
python3 -b -X dev -m pytest \
@ -1382,18 +1420,24 @@ jobs:
2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
- name: Upload pytest output
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.6.2
with:
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
path: pytest-*.txt
overwrite: true
- name: Upload coverage artifact
if: needs.info.outputs.skip_coverage != 'true'
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.6.2
with:
name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
path: coverage.xml
overwrite: true
- name: Upload test results artifact
if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
uses: actions/upload-artifact@v4.6.2
with:
name: test-results-partial-${{ matrix.python-version }}-${{ matrix.group }}
path: junit.xml
- name: Check dirty
run: |
./script/check_dirty
@ -1410,12 +1454,37 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Download all coverage artifacts
uses: actions/download-artifact@v4.1.9
uses: actions/download-artifact@v4.2.1
with:
pattern: coverage-*
- name: Upload coverage to Codecov
if: needs.info.outputs.test_full_suite == 'false'
uses: codecov/codecov-action@v5.3.1
uses: codecov/codecov-action@v5.4.0
with:
fail_ci_if_error: true
token: ${{ secrets.CODECOV_TOKEN }}
upload-test-results:
name: Upload test results to Codecov
# codecov/test-results-action currently doesn't support tokenless uploads
# therefore we can't run it on forks
if: ${{ (github.event_name != 'pull_request' || !github.event.pull_request.head.repo.fork) && needs.info.outputs.skip_coverage != 'true' && !cancelled() }}
runs-on: ubuntu-24.04
needs:
- info
- pytest-partial
- pytest-full
- pytest-postgres
- pytest-mariadb
timeout-minutes: 10
steps:
- name: Download all test results artifacts
uses: actions/download-artifact@v4.2.1
with:
pattern: test-results-*
- name: Upload test results to Codecov
uses: codecov/test-results-action@v1
with:
fail_ci_if_error: true
verbose: true
token: ${{ secrets.CODECOV_TOKEN }}
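
The --junitxml=junit.xml -o junit_family=legacy flags added to each pytest invocation above produce the JUnit reports that the new upload-test-results job forwards to codecov/test-results-action. A minimal sketch of the same flags driven locally through pytest's Python entry point (the test path is a placeholder, not part of the workflow; requires pytest and pytest-cov):

import pytest

pytest.main(
    [
        "--cov=homeassistant",        # coverage source, as in cov_params
        "--cov-report=xml",           # writes coverage.xml for the coverage upload
        "--junitxml=junit.xml",       # JUnit report consumed by the test results upload
        "-o", "junit_family=legacy",  # schema variant the workflow requests
        "tests/components/demo",      # placeholder test selection
    ]
)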

View File

@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Initialize CodeQL
uses: github/codeql-action/init@v3.28.10
uses: github/codeql-action/init@v3.28.13
with:
languages: python
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3.28.10
uses: github/codeql-action/analyze@v3.28.13
with:
category: "/language:python"

View File

@ -22,7 +22,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.4.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}

View File

@ -36,7 +36,7 @@ jobs:
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.4.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
@ -91,7 +91,7 @@ jobs:
) > build_constraints.txt
- name: Upload env_file
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.6.2
with:
name: env_file
path: ./.env_file
@ -99,14 +99,14 @@ jobs:
overwrite: true
- name: Upload build_constraints
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.6.2
with:
name: build_constraints
path: ./build_constraints.txt
overwrite: true
- name: Upload requirements_diff
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.6.2
with:
name: requirements_diff
path: ./requirements_diff.txt
@ -118,7 +118,7 @@ jobs:
python -m script.gen_requirements_all ci
- name: Upload requirements_all_wheels
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.6.2
with:
name: requirements_all_wheels
path: ./requirements_all_wheels_*.txt
@ -138,17 +138,17 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Download env_file
uses: actions/download-artifact@v4.1.9
uses: actions/download-artifact@v4.2.1
with:
name: env_file
- name: Download build_constraints
uses: actions/download-artifact@v4.1.9
uses: actions/download-artifact@v4.2.1
with:
name: build_constraints
- name: Download requirements_diff
uses: actions/download-artifact@v4.1.9
uses: actions/download-artifact@v4.2.1
with:
name: requirements_diff
@ -159,7 +159,7 @@ jobs:
sed -i "/uv/d" requirements_diff.txt
- name: Build wheels
uses: home-assistant/wheels@2024.11.0
uses: home-assistant/wheels@2025.03.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
@ -187,22 +187,22 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Download env_file
uses: actions/download-artifact@v4.1.9
uses: actions/download-artifact@v4.2.1
with:
name: env_file
- name: Download build_constraints
uses: actions/download-artifact@v4.1.9
uses: actions/download-artifact@v4.2.1
with:
name: build_constraints
- name: Download requirements_diff
uses: actions/download-artifact@v4.1.9
uses: actions/download-artifact@v4.2.1
with:
name: requirements_diff
- name: Download requirements_all_wheels
uses: actions/download-artifact@v4.1.9
uses: actions/download-artifact@v4.2.1
with:
name: requirements_all_wheels
@ -219,7 +219,7 @@ jobs:
sed -i "/uv/d" requirements_diff.txt
- name: Build wheels
uses: home-assistant/wheels@2024.11.0
uses: home-assistant/wheels@2025.03.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2

.gitignore
View File

@ -69,6 +69,7 @@ test-reports/
test-results.xml
test-output.xml
pytest-*.txt
junit.xml
# Translations
*.mo

View File

@ -1,6 +1,6 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.9.7
rev: v0.11.0
hooks:
- id: ruff
args:

View File

@ -119,6 +119,7 @@ homeassistant.components.bluetooth_adapters.*
homeassistant.components.bluetooth_tracker.*
homeassistant.components.bmw_connected_drive.*
homeassistant.components.bond.*
homeassistant.components.bosch_alarm.*
homeassistant.components.braviatv.*
homeassistant.components.bring.*
homeassistant.components.brother.*
@ -136,6 +137,7 @@ homeassistant.components.clicksend.*
homeassistant.components.climate.*
homeassistant.components.cloud.*
homeassistant.components.co2signal.*
homeassistant.components.comelit.*
homeassistant.components.command_line.*
homeassistant.components.config.*
homeassistant.components.configurator.*
@ -396,6 +398,7 @@ homeassistant.components.pure_energie.*
homeassistant.components.purpleair.*
homeassistant.components.pushbullet.*
homeassistant.components.pvoutput.*
homeassistant.components.pyload.*
homeassistant.components.python_script.*
homeassistant.components.qbus.*
homeassistant.components.qnap_qsw.*
@ -410,6 +413,7 @@ homeassistant.components.recollect_waste.*
homeassistant.components.recorder.*
homeassistant.components.remember_the_milk.*
homeassistant.components.remote.*
homeassistant.components.remote_calendar.*
homeassistant.components.renault.*
homeassistant.components.reolink.*
homeassistant.components.repairs.*
@ -528,6 +532,7 @@ homeassistant.components.vallox.*
homeassistant.components.valve.*
homeassistant.components.velbus.*
homeassistant.components.vlc_telnet.*
homeassistant.components.vodafone_station.*
homeassistant.components.wake_on_lan.*
homeassistant.components.wake_word.*
homeassistant.components.wallbox.*

.vscode/tasks.json
View File

@ -4,7 +4,7 @@
{
"label": "Run Home Assistant Core",
"type": "shell",
"command": "hass -c ./config",
"command": "${command:python.interpreterPath} -m homeassistant -c ./config",
"group": "test",
"presentation": {
"reveal": "always",

CODEOWNERS
View File

@ -216,6 +216,8 @@ build.json @home-assistant/supervisor
/tests/components/bmw_connected_drive/ @gerard33 @rikroe
/homeassistant/components/bond/ @bdraco @prystupa @joshs85 @marciogranzotto
/tests/components/bond/ @bdraco @prystupa @joshs85 @marciogranzotto
/homeassistant/components/bosch_alarm/ @mag1024 @sanjay900
/tests/components/bosch_alarm/ @mag1024 @sanjay900
/homeassistant/components/bosch_shc/ @tschamm
/tests/components/bosch_shc/ @tschamm
/homeassistant/components/braviatv/ @bieniu @Drafteed
@ -570,8 +572,8 @@ build.json @home-assistant/supervisor
/tests/components/google_cloud/ @lufton @tronikos
/homeassistant/components/google_drive/ @tronikos
/tests/components/google_drive/ @tronikos
/homeassistant/components/google_generative_ai_conversation/ @tronikos
/tests/components/google_generative_ai_conversation/ @tronikos
/homeassistant/components/google_generative_ai_conversation/ @tronikos @ivanlh
/tests/components/google_generative_ai_conversation/ @tronikos @ivanlh
/homeassistant/components/google_mail/ @tkdrob
/tests/components/google_mail/ @tkdrob
/homeassistant/components/google_photos/ @allenporter
@ -1183,6 +1185,8 @@ build.json @home-assistant/supervisor
/tests/components/prusalink/ @balloob
/homeassistant/components/ps4/ @ktnrg45
/tests/components/ps4/ @ktnrg45
/homeassistant/components/pterodactyl/ @elmurato
/tests/components/pterodactyl/ @elmurato
/homeassistant/components/pure_energie/ @klaasnicolaas
/tests/components/pure_energie/ @klaasnicolaas
/homeassistant/components/purpleair/ @bachya
@ -1252,6 +1256,8 @@ build.json @home-assistant/supervisor
/tests/components/refoss/ @ashionky
/homeassistant/components/remote/ @home-assistant/core
/tests/components/remote/ @home-assistant/core
/homeassistant/components/remote_calendar/ @Thomas55555
/tests/components/remote_calendar/ @Thomas55555
/homeassistant/components/renault/ @epenet
/tests/components/renault/ @epenet
/homeassistant/components/renson/ @jimmyd-be
@ -1474,8 +1480,6 @@ build.json @home-assistant/supervisor
/tests/components/suez_water/ @ooii @jb101010-2
/homeassistant/components/sun/ @Swamp-Ig
/tests/components/sun/ @Swamp-Ig
/homeassistant/components/sunweg/ @rokam
/tests/components/sunweg/ @rokam
/homeassistant/components/supla/ @mwegrzynek
/homeassistant/components/surepetcare/ @benleb @danielhiversen
/tests/components/surepetcare/ @benleb @danielhiversen
@ -1529,8 +1533,8 @@ build.json @home-assistant/supervisor
/tests/components/tedee/ @patrickhilker @zweckj
/homeassistant/components/tellduslive/ @fredrike
/tests/components/tellduslive/ @fredrike
/homeassistant/components/template/ @PhracturedBlue @home-assistant/core
/tests/components/template/ @PhracturedBlue @home-assistant/core
/homeassistant/components/template/ @Petro31 @PhracturedBlue @home-assistant/core
/tests/components/template/ @Petro31 @PhracturedBlue @home-assistant/core
/homeassistant/components/tesla_fleet/ @Bre77
/tests/components/tesla_fleet/ @Bre77
/homeassistant/components/tesla_wall_connector/ @einarhauks

Dockerfile
View File

@ -25,13 +25,13 @@ RUN \
"armv7") go2rtc_suffix='arm' ;; \
*) go2rtc_suffix=${BUILD_ARCH} ;; \
esac \
&& curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.8/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
&& curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.9/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
&& chmod +x /bin/go2rtc \
# Verify go2rtc can be executed
&& go2rtc --version
# Install uv
RUN pip3 install uv==0.6.1
RUN pip3 install uv==0.6.10
WORKDIR /usr/src

View File

@ -19,4 +19,4 @@ labels:
org.opencontainers.image.authors: The Home Assistant Authors
org.opencontainers.image.url: https://www.home-assistant.io/
org.opencontainers.image.documentation: https://www.home-assistant.io/docs/
org.opencontainers.image.licenses: Apache License 2.0
org.opencontainers.image.licenses: Apache-2.0

View File

@ -178,6 +178,15 @@ _BLOCKING_CALLS: tuple[BlockingCall, ...] = (
strict_core=False,
skip_for_tests=True,
),
BlockingCall(
original_func=SSLContext.set_default_verify_paths,
object=SSLContext,
function="set_default_verify_paths",
check_allowed=None,
strict=False,
strict_core=False,
skip_for_tests=True,
),
BlockingCall(
original_func=Path.open,
object=Path,
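
SSLContext.set_default_verify_paths is added here because it makes OpenSSL load the default CA certificates from disk, which should not happen inside the event loop. A rough illustration of the guard pattern a BlockingCall entry drives — the wrapper below is a simplified stand-in, not Home Assistant's actual implementation:

import asyncio
import functools
import ssl

def _protect(original):
    # Simplified sketch: report when a known-blocking callable runs in the loop.
    @functools.wraps(original)
    def wrapper(*args, **kwargs):
        try:
            asyncio.get_running_loop()
        except RuntimeError:
            pass  # No running event loop; blocking disk I/O is fine here.
        else:
            print(f"Blocking call to {original.__name__} inside the event loop")
        return original(*args, **kwargs)
    return wrapper

ssl.SSLContext.set_default_verify_paths = _protect(
    ssl.SSLContext.set_default_verify_paths
)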

View File

@ -81,6 +81,7 @@ from .helpers import (
entity,
entity_registry,
floor_registry,
frame,
issue_registry,
label_registry,
recorder,
@ -92,6 +93,7 @@ from .helpers.dispatcher import async_dispatcher_send_internal
from .helpers.storage import get_internal_store_manager
from .helpers.system_info import async_get_system_info
from .helpers.typing import ConfigType
from .loader import Integration
from .setup import (
# _setup_started is marked as protected to make it clear
# that it is not part of the public API and should not be used
@ -298,14 +300,6 @@ async def async_setup_hass(
return hass
async def stop_hass(hass: core.HomeAssistant) -> None:
"""Stop hass."""
# Ask integrations to shut down. It's messy but we can't
# do a clean stop without knowing what is broken
with contextlib.suppress(TimeoutError):
async with hass.timeout.async_timeout(10):
await hass.async_stop()
hass = await create_hass()
if runtime_config.skip_pip or runtime_config.skip_pip_packages:
@ -344,7 +338,7 @@ async def async_setup_hass(
if config_dict is None:
recovery_mode = True
await stop_hass(hass)
await hass.async_stop(force=True)
hass = await create_hass()
elif not basic_setup_success:
@ -352,7 +346,7 @@ async def async_setup_hass(
"Unable to set up core integrations. Activating recovery mode"
)
recovery_mode = True
await stop_hass(hass)
await hass.async_stop(force=True)
hass = await create_hass()
elif any(
@ -367,7 +361,7 @@ async def async_setup_hass(
old_logging = hass.data.get(DATA_LOGGING)
recovery_mode = True
await stop_hass(hass)
await hass.async_stop(force=True)
hass = await create_hass()
if old_logging:
@ -441,9 +435,10 @@ async def async_load_base_functionality(hass: core.HomeAssistant) -> None:
if DATA_REGISTRIES_LOADED in hass.data:
return
hass.data[DATA_REGISTRIES_LOADED] = None
translation.async_setup(hass)
entity.async_setup(hass)
frame.async_setup(hass)
template.async_setup(hass)
translation.async_setup(hass)
await asyncio.gather(
create_eager_task(get_internal_store_manager(hass).async_initialize()),
create_eager_task(area_registry.async_load(hass)),
@ -664,11 +659,10 @@ def _create_log_file(
err_handler = _RotatingFileHandlerWithoutShouldRollOver(
err_log_path, backupCount=1
)
try:
err_handler.doRollover()
except OSError as err:
_LOGGER.error("Error rolling over log file: %s", err)
try:
err_handler.doRollover()
except OSError as err:
_LOGGER.error("Error rolling over log file: %s", err)
return err_handler
@ -718,20 +712,25 @@ def _get_domains(hass: core.HomeAssistant, config: dict[str, Any]) -> set[str]:
return domains
async def _async_resolve_domains_to_setup(
async def _async_resolve_domains_and_preload(
hass: core.HomeAssistant, config: dict[str, Any]
) -> tuple[set[str], dict[str, loader.Integration]]:
"""Resolve all dependencies and return list of domains to set up."""
) -> tuple[dict[str, Integration], dict[str, Integration]]:
"""Resolve all dependencies and return integrations to set up.
The return value is a tuple of two dictionaries:
- The first dictionary contains integrations
specified by the configuration (including config entries).
- The second dictionary contains the same integrations as the first dictionary
together with all their dependencies.
"""
domains_to_setup = _get_domains(hass, config)
needed_requirements: set[str] = set()
platform_integrations = conf_util.extract_platform_integrations(
config, BASE_PLATFORMS
)
# Ensure base platforms that have platform integrations are added to
# to `domains_to_setup so they can be setup first instead of
# discovering them when later when a config entry setup task
# notices its needed and there is already a long line to use
# the import executor.
# Ensure base platforms that have platform integrations are added to `domains`,
# so they can be setup first instead of discovering them later when a config
# entry setup task notices that it's needed and there is already a long line
# to use the import executor.
#
# For example if we have
# sensor:
@ -747,111 +746,78 @@ async def _async_resolve_domains_to_setup(
# so this will be less of a problem in the future.
domains_to_setup.update(platform_integrations)
# Load manifests for base platforms and platform based integrations
# that are defined under base platforms right away since we do not require
# the manifest to list them as dependencies and we want to avoid the lock
# contention when multiple integrations try to load them at once
additional_manifests_to_load = {
# Additionally process base platforms since we do not require the manifest
# to list them as dependencies.
# We want to later avoid lock contention when multiple integrations try to load
# their manifests at once.
# Also process integrations that are defined under base platforms
# to speed things up.
additional_domains_to_process = {
*BASE_PLATFORMS,
*chain.from_iterable(platform_integrations.values()),
}
translations_to_load = additional_manifests_to_load.copy()
# Resolve all dependencies so we know all integrations
# that will have to be loaded and start right-away
integration_cache: dict[str, loader.Integration] = {}
to_resolve: set[str] = domains_to_setup
while to_resolve or additional_manifests_to_load:
old_to_resolve: set[str] = to_resolve
to_resolve = set()
integrations_or_excs = await loader.async_get_integrations(
hass, {*domains_to_setup, *additional_domains_to_process}
)
# Eliminate those missing or with invalid manifest
integrations_to_process = {
domain: itg
for domain, itg in integrations_or_excs.items()
if isinstance(itg, Integration)
}
integrations_dependencies = await loader.resolve_integrations_dependencies(
hass, integrations_to_process.values()
)
# Eliminate those without valid dependencies
integrations_to_process = {
domain: integrations_to_process[domain] for domain in integrations_dependencies
}
if additional_manifests_to_load:
to_get = {*old_to_resolve, *additional_manifests_to_load}
additional_manifests_to_load.clear()
else:
to_get = old_to_resolve
integrations_to_setup = {
domain: itg
for domain, itg in integrations_to_process.items()
if domain in domains_to_setup
}
all_integrations_to_setup = integrations_to_setup.copy()
all_integrations_to_setup.update(
(dep, loader.async_get_loaded_integration(hass, dep))
for domain in integrations_to_setup
for dep in integrations_dependencies[domain].difference(
all_integrations_to_setup
)
)
manifest_deps: set[str] = set()
resolve_dependencies_tasks: list[asyncio.Task[bool]] = []
integrations_to_process: list[loader.Integration] = []
for domain, itg in (await loader.async_get_integrations(hass, to_get)).items():
if not isinstance(itg, loader.Integration):
continue
integration_cache[domain] = itg
needed_requirements.update(itg.requirements)
# Make sure manifests for dependencies are loaded in the next
# loop to try to group as many as manifest loads in a single
# call to avoid the creating one-off executor jobs later in
# the setup process
additional_manifests_to_load.update(
dep
for dep in chain(itg.dependencies, itg.after_dependencies)
if dep not in integration_cache
)
if domain not in old_to_resolve:
continue
integrations_to_process.append(itg)
manifest_deps.update(itg.dependencies)
manifest_deps.update(itg.after_dependencies)
if not itg.all_dependencies_resolved:
resolve_dependencies_tasks.append(
create_eager_task(
itg.resolve_dependencies(),
name=f"resolve dependencies {domain}",
loop=hass.loop,
)
)
if unseen_deps := manifest_deps - integration_cache.keys():
# If there are dependencies, try to preload all
# the integrations manifest at once and add them
# to the list of requirements we need to install
# so we can try to check if they are already installed
# in a single call below which avoids each integration
# having to wait for the lock to do it individually
deps = await loader.async_get_integrations(hass, unseen_deps)
for dependant_domain, dependant_itg in deps.items():
if isinstance(dependant_itg, loader.Integration):
integration_cache[dependant_domain] = dependant_itg
needed_requirements.update(dependant_itg.requirements)
if resolve_dependencies_tasks:
await asyncio.gather(*resolve_dependencies_tasks)
for itg in integrations_to_process:
try:
all_deps = itg.all_dependencies
except RuntimeError:
# Integration.all_dependencies raises RuntimeError if
# dependencies could not be resolved
continue
for dep in all_deps:
if dep in domains_to_setup:
continue
domains_to_setup.add(dep)
to_resolve.add(dep)
_LOGGER.info("Domains to be set up: %s", domains_to_setup)
# Gather requirements for all integrations,
# their dependencies and after dependencies.
# To gather all the requirements we must ignore exceptions here.
# The exceptions will be detected and handled later in the bootstrap process.
integrations_after_dependencies = (
await loader.resolve_integrations_after_dependencies(
hass, integrations_to_process.values(), ignore_exceptions=True
)
)
integrations_requirements = {
domain: itg.requirements for domain, itg in integrations_to_process.items()
}
integrations_requirements.update(
(dep, loader.async_get_loaded_integration(hass, dep).requirements)
for deps in integrations_after_dependencies.values()
for dep in deps.difference(integrations_requirements)
)
all_requirements = set(chain.from_iterable(integrations_requirements.values()))
# Optimistically check if requirements are already installed
# ahead of setting up the integrations so we can prime the cache
# We do not wait for this since its an optimization only
# We do not wait for this since it's an optimization only
hass.async_create_background_task(
requirements.async_load_installed_versions(hass, needed_requirements),
requirements.async_load_installed_versions(hass, all_requirements),
"check installed requirements",
eager_start=True,
)
#
# Only add the domains_to_setup after we finish resolving
# as new domains are likely to added in the process
#
translations_to_load.update(domains_to_setup)
# Start loading translations for all integrations we are going to set up
# in the background so they are ready when we need them. This avoids a
# lot of waiting for the translation load lock and a thundering herd of
@ -862,6 +828,7 @@ async def _async_resolve_domains_to_setup(
# hold the translation load lock and if anything is fast enough to
# wait for the translation load lock, loading will be done by the
# time it gets to it.
translations_to_load = {*all_integrations_to_setup, *additional_domains_to_process}
hass.async_create_background_task(
translation.async_load_integrations(hass, translations_to_load),
"load translations",
@ -873,13 +840,13 @@ async def _async_resolve_domains_to_setup(
# in the setup process.
hass.async_create_background_task(
get_internal_store_manager(hass).async_preload(
[*PRELOAD_STORAGE, *domains_to_setup]
[*PRELOAD_STORAGE, *all_integrations_to_setup]
),
"preload storage",
eager_start=True,
)
return domains_to_setup, integration_cache
return integrations_to_setup, all_integrations_to_setup
async def _async_set_up_integrations(
@ -889,69 +856,90 @@ async def _async_set_up_integrations(
watcher = _WatchPendingSetups(hass, _setup_started(hass))
watcher.async_start()
domains_to_setup, integration_cache = await _async_resolve_domains_to_setup(
integrations, all_integrations = await _async_resolve_domains_and_preload(
hass, config
)
stage_2_domains = domains_to_setup.copy()
all_domains = set(all_integrations)
domains = set(integrations)
_LOGGER.info(
"Domains to be set up: %s | %s",
domains,
all_domains - domains,
)
# Initialize recorder
if "recorder" in domains_to_setup:
if "recorder" in all_domains:
recorder.async_initialize_recorder(hass)
# Initialize backup
if "backup" in domains_to_setup:
if "backup" in all_domains:
backup.async_initialize_backup(hass)
stage_0_and_1_domains: list[tuple[str, set[str], int | None]] = [
stages: list[tuple[str, set[str], int | None]] = [
*(
(name, domain_group & domains_to_setup, timeout)
(name, domain_group, timeout)
for name, domain_group, timeout in STAGE_0_INTEGRATIONS
),
("stage 1", STAGE_1_INTEGRATIONS & domains_to_setup, STAGE_1_TIMEOUT),
("1", STAGE_1_INTEGRATIONS, STAGE_1_TIMEOUT),
("2", domains, STAGE_2_TIMEOUT),
]
_LOGGER.info("Setting up stage 0 and 1")
for name, domain_group, timeout in stage_0_and_1_domains:
if not domain_group:
_LOGGER.info("Setting up stage 0")
for name, domain_group, timeout in stages:
stage_domains_unfiltered = domain_group & all_domains
if not stage_domains_unfiltered:
_LOGGER.info("Nothing to set up in stage %s: %s", name, domain_group)
continue
_LOGGER.info("Setting up %s: %s", name, domain_group)
to_be_loaded = domain_group.copy()
to_be_loaded.update(
stage_domains = stage_domains_unfiltered - hass.config.components
if not stage_domains:
_LOGGER.info("Already set up stage %s: %s", name, stage_domains_unfiltered)
continue
stage_dep_domains_unfiltered = {
dep
for domain in domain_group
if (integration := integration_cache.get(domain)) is not None
for dep in integration.all_dependencies
for domain in stage_domains
for dep in all_integrations[domain].all_dependencies
if dep not in stage_domains
}
stage_dep_domains = stage_dep_domains_unfiltered - hass.config.components
stage_all_domains = stage_domains | stage_dep_domains
stage_all_integrations = {
domain: all_integrations[domain] for domain in stage_all_domains
}
# Detect all cycles
stage_integrations_after_dependencies = (
await loader.resolve_integrations_after_dependencies(
hass, stage_all_integrations.values(), stage_all_domains
)
)
async_set_domains_to_be_loaded(hass, to_be_loaded)
stage_2_domains -= to_be_loaded
stage_all_domains = set(stage_integrations_after_dependencies)
stage_domains &= stage_all_domains
stage_dep_domains &= stage_all_domains
_LOGGER.info(
"Setting up stage %s: %s | %s\nDependencies: %s | %s",
name,
stage_domains,
stage_domains_unfiltered - stage_domains,
stage_dep_domains,
stage_dep_domains_unfiltered - stage_dep_domains,
)
async_set_domains_to_be_loaded(hass, stage_all_domains)
if timeout is None:
await _async_setup_multi_components(hass, domain_group, config)
else:
try:
async with hass.timeout.async_timeout(timeout, cool_down=COOLDOWN_TIME):
await _async_setup_multi_components(hass, domain_group, config)
except TimeoutError:
_LOGGER.warning(
"Setup timed out for %s waiting on %s - moving forward",
name,
hass._active_tasks, # noqa: SLF001
)
# Add after dependencies when setting up stage 2 domains
async_set_domains_to_be_loaded(hass, stage_2_domains)
if stage_2_domains:
_LOGGER.info("Setting up stage 2: %s", stage_2_domains)
await _async_setup_multi_components(hass, stage_all_domains, config)
continue
try:
async with hass.timeout.async_timeout(
STAGE_2_TIMEOUT, cool_down=COOLDOWN_TIME
):
await _async_setup_multi_components(hass, stage_2_domains, config)
async with hass.timeout.async_timeout(timeout, cool_down=COOLDOWN_TIME):
await _async_setup_multi_components(hass, stage_all_domains, config)
except TimeoutError:
_LOGGER.warning(
"Setup timed out for stage 2 waiting on %s - moving forward",
"Setup timed out for stage %s waiting on %s - moving forward",
name,
hass._active_tasks, # noqa: SLF001
)
@ -1053,8 +1041,6 @@ async def _async_setup_multi_components(
config: dict[str, Any],
) -> None:
"""Set up multiple domains. Log on failure."""
# Avoid creating tasks for domains that were setup in a previous stage
domains_not_yet_setup = domains - hass.config.components
# Create setup tasks for base platforms first since everything will have
# to wait to be imported, and the sooner we can get the base platforms
# loaded the sooner we can start loading the rest of the integrations.
@ -1064,9 +1050,7 @@ async def _async_setup_multi_components(
f"setup component {domain}",
eager_start=True,
)
for domain in sorted(
domains_not_yet_setup, key=SETUP_ORDER_SORT_KEY, reverse=True
)
for domain in sorted(domains, key=SETUP_ORDER_SORT_KEY, reverse=True)
}
results = await asyncio.gather(*futures.values(), return_exceptions=True)
for idx, domain in enumerate(futures):
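
To summarize the refactor for readers tracing this diff: _async_resolve_domains_and_preload now returns two dictionaries — the integrations named by the configuration, and a superset that also includes every resolved dependency — and the staged setup loop filters each stage against that superset. A toy model of the two-dictionary contract (the domain names, the dependency table, and the placeholder strings are hypothetical; the real code holds Integration objects):

integrations = {"mqtt": "Integration(mqtt)", "sensor": "Integration(sensor)"}
dependencies = {"mqtt": {"http", "network"}, "sensor": {"http"}}

all_integrations = dict(integrations)
for domain in integrations:
    for dep in dependencies[domain]:
        # bootstrap.py fetches these via loader.async_get_loaded_integration().
        all_integrations.setdefault(dep, f"Integration({dep})")

assert set(all_integrations) == {"http", "mqtt", "network", "sensor"}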

View File

@ -0,0 +1,5 @@
{
"domain": "bosch",
"name": "Bosch",
"integrations": ["bosch_alarm", "bosch_shc", "home_connect"]
}

View File

@ -0,0 +1,5 @@
{
"domain": "eve",
"name": "Eve",
"iot_standards": ["matter"]
}

View File

@ -1,5 +1,6 @@
{
"domain": "motionblinds",
"name": "Motionblinds",
"integrations": ["motion_blinds", "motionblinds_ble"]
"integrations": ["motion_blinds", "motionblinds_ble"],
"iot_standards": ["matter"]
}

View File

@ -24,7 +24,7 @@ from homeassistant.components.weather import (
API_METRIC: Final = "Metric"
ATTRIBUTION: Final = "Data provided by AccuWeather"
ATTR_CATEGORY: Final = "Category"
ATTR_CATEGORY_VALUE = "CategoryValue"
ATTR_DIRECTION: Final = "Direction"
ATTR_ENGLISH: Final = "English"
ATTR_LEVEL: Final = "level"
@ -55,5 +55,18 @@ CONDITION_MAP = {
for cond_ha, cond_codes in CONDITION_CLASSES.items()
for cond_code in cond_codes
}
AIR_QUALITY_CATEGORY_MAP = {
1: "good",
2: "moderate",
3: "unhealthy",
4: "very_unhealthy",
5: "hazardous",
}
POLLEN_CATEGORY_MAP = {
1: "low",
2: "moderate",
3: "high",
4: "very_high",
}
UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=40)
UPDATE_INTERVAL_DAILY_FORECAST = timedelta(hours=6)
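
Switching from the localized Category string to the numeric CategoryValue keeps the sensor states stable now that the coordinator requests forecasts in the configured Home Assistant language (see the coordinator change below). A quick illustration with a hypothetical payload fragment:

AIR_QUALITY_CATEGORY_MAP = {1: "good", 2: "moderate", 3: "unhealthy", 4: "very_unhealthy", 5: "hazardous"}

# Hypothetical API answer in French; the numeric value is locale independent.
day = {"CategoryValue": 2, "Category": "Modérée"}
assert AIR_QUALITY_CATEGORY_MAP[day["CategoryValue"]] == "moderate"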

View File

@ -75,7 +75,11 @@ class AccuWeatherObservationDataUpdateCoordinator(
async with timeout(10):
result = await self.accuweather.async_get_current_conditions()
except EXCEPTIONS as error:
raise UpdateFailed(error) from error
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="current_conditions_update_error",
translation_placeholders={"error": repr(error)},
) from error
_LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining)
@ -117,9 +121,15 @@ class AccuWeatherDailyForecastDataUpdateCoordinator(
"""Update data via library."""
try:
async with timeout(10):
result = await self.accuweather.async_get_daily_forecast()
result = await self.accuweather.async_get_daily_forecast(
language=self.hass.config.language
)
except EXCEPTIONS as error:
raise UpdateFailed(error) from error
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="forecast_update_error",
translation_placeholders={"error": repr(error)},
) from error
_LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining)

View File

@ -7,6 +7,6 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["accuweather"],
"requirements": ["accuweather==4.1.0"],
"requirements": ["accuweather==4.2.0"],
"single_config_entry": true
}

View File

@ -29,8 +29,9 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import (
AIR_QUALITY_CATEGORY_MAP,
API_METRIC,
ATTR_CATEGORY,
ATTR_CATEGORY_VALUE,
ATTR_DIRECTION,
ATTR_ENGLISH,
ATTR_LEVEL,
@ -38,6 +39,7 @@ from .const import (
ATTR_VALUE,
ATTRIBUTION,
MAX_FORECAST_DAYS,
POLLEN_CATEGORY_MAP,
)
from .coordinator import (
AccuWeatherConfigEntry,
@ -59,9 +61,9 @@ class AccuWeatherSensorDescription(SensorEntityDescription):
FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
AccuWeatherSensorDescription(
key="AirQuality",
value_fn=lambda data: cast(str, data[ATTR_CATEGORY]),
value_fn=lambda data: AIR_QUALITY_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]],
device_class=SensorDeviceClass.ENUM,
options=["good", "hazardous", "high", "low", "moderate", "unhealthy"],
options=list(AIR_QUALITY_CATEGORY_MAP.values()),
translation_key="air_quality",
),
AccuWeatherSensorDescription(
@ -83,7 +85,9 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
entity_registry_enabled_default=False,
native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
attr_fn=lambda data: {
ATTR_LEVEL: POLLEN_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]]
},
translation_key="grass_pollen",
),
AccuWeatherSensorDescription(
@ -107,7 +111,9 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
entity_registry_enabled_default=False,
native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
attr_fn=lambda data: {
ATTR_LEVEL: POLLEN_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]]
},
translation_key="mold_pollen",
),
AccuWeatherSensorDescription(
@ -115,7 +121,9 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
entity_registry_enabled_default=False,
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
attr_fn=lambda data: {
ATTR_LEVEL: POLLEN_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]]
},
translation_key="ragweed_pollen",
),
AccuWeatherSensorDescription(
@ -181,14 +189,18 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
entity_registry_enabled_default=False,
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
attr_fn=lambda data: {
ATTR_LEVEL: POLLEN_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]]
},
translation_key="tree_pollen",
),
AccuWeatherSensorDescription(
key="UVIndex",
native_unit_of_measurement=UV_INDEX,
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
attr_fn=lambda data: {
ATTR_LEVEL: POLLEN_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]]
},
translation_key="uv_index_forecast",
),
AccuWeatherSensorDescription(
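The sensor hunks above now derive both the reported state and the ENUM options from shared maps imported from const.py; the maps themselves are not part of this diff. A plausible reconstruction, inferred from the translation keys below (the numeric CategoryValue keys are illustrative only):

AIR_QUALITY_CATEGORY_MAP = {
    1: "good",
    2: "moderate",
    3: "unhealthy",
    4: "very_unhealthy",
    5: "hazardous",
}
POLLEN_CATEGORY_MAP = {1: "low", 2: "moderate", 3: "high", 4: "very_high"}

# Deriving options from the map keeps sensor.py and strings.json in step:
options = list(AIR_QUALITY_CATEGORY_MAP.values())

This removes the hand-maintained options list, so adding a category only touches the map and the translations.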

View File

@ -26,10 +26,20 @@
"state": {
"good": "Good",
"hazardous": "Hazardous",
"high": "High",
"low": "Low",
"moderate": "Moderate",
"unhealthy": "Unhealthy"
"unhealthy": "Unhealthy",
"very_unhealthy": "Very unhealthy"
},
"state_attributes": {
"options": {
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]",
"very_unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::very_unhealthy%]"
}
}
}
},
"apparent_temperature": {
@ -62,12 +72,10 @@
"level": {
"name": "Level",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
"high": "High",
"low": "Low",
"moderate": "Moderate",
"very_high": "Very high"
}
}
}
@ -81,12 +89,10 @@
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
"high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
"very_high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::very_high%]"
}
}
}
@ -100,6 +106,15 @@
"steady": "Steady",
"rising": "Rising",
"falling": "Falling"
},
"state_attributes": {
"options": {
"state": {
"falling": "[%key:component::accuweather::entity::sensor::pressure_tendency::state::falling%]",
"rising": "[%key:component::accuweather::entity::sensor::pressure_tendency::state::rising%]",
"steady": "[%key:component::accuweather::entity::sensor::pressure_tendency::state::steady%]"
}
}
}
},
"ragweed_pollen": {
@ -108,12 +123,10 @@
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
"high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
"very_high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::very_high%]"
}
}
}
@ -154,12 +167,10 @@
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
"high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
"very_high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::very_high%]"
}
}
}
@ -170,12 +181,10 @@
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
"high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
"very_high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::very_high%]"
}
}
}
@ -186,12 +195,10 @@
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
"high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
"very_high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::very_high%]"
}
}
}
@ -222,6 +229,14 @@
}
}
},
"exceptions": {
"current_conditions_update_error": {
"message": "An error occurred while retrieving weather current conditions data from the AccuWeather API: {error}"
},
"forecast_update_error": {
"message": "An error occurred while retrieving weather forecast data from the AccuWeather API: {error}"
}
},
"system_health": {
"info": {
"can_reach_server": "Reach AccuWeather server",

View File

@ -5,14 +5,14 @@
"data": {
"connection_type": "Select connection type"
},
"description": "Select connection type. Local requires heaters with bluetooth"
"description": "Select connection type. Local requires heaters with Bluetooth"
},
"local": {
"data": {
"wifi_ssid": "Wi-Fi SSID",
"wifi_pswd": "Wi-Fi Password"
"wifi_pswd": "Wi-Fi password"
},
"description": "Reset the heater by pressing + and OK until display shows 'Reset'. Then press and hold OK button on the heater until the blue led starts blinking before pressing Submit. Configuring heater might take some minutes."
"description": "Reset the heater by pressing + and OK until display shows 'Reset'. Then press and hold OK button on the heater until the blue LED starts blinking before pressing Submit. Configuring heater might take some minutes."
},
"cloud": {
"data": {

View File

@ -2,6 +2,7 @@
from __future__ import annotations
from decimal import Decimal
import logging
from typing import Any
@ -14,6 +15,7 @@ from homeassistant.components.climate import (
FAN_MEDIUM,
ClimateEntity,
ClimateEntityFeature,
HVACAction,
HVACMode,
)
from homeassistant.const import ATTR_TEMPERATURE, PRECISION_WHOLE, UnitOfTemperature
@ -49,6 +51,14 @@ ADVANTAGE_AIR_MYTEMP_ENABLED = "climateControlModeEnabled"
ADVANTAGE_AIR_HEAT_TARGET = "myAutoHeatTargetTemp"
ADVANTAGE_AIR_COOL_TARGET = "myAutoCoolTargetTemp"
ADVANTAGE_AIR_MYFAN = "autoAA"
ADVANTAGE_AIR_MYAUTO_MODE_SET = "myAutoModeCurrentSetMode"
HVAC_ACTIONS = {
"cool": HVACAction.COOLING,
"heat": HVACAction.HEATING,
"vent": HVACAction.FAN,
"dry": HVACAction.DRYING,
}
HVAC_MODES = [
HVACMode.OFF,
@ -175,6 +185,17 @@ class AdvantageAirAC(AdvantageAirAcEntity, ClimateEntity):
return ADVANTAGE_AIR_HVAC_MODES.get(self._ac["mode"])
return HVACMode.OFF
@property
def hvac_action(self) -> HVACAction | None:
"""Return the current running HVAC action."""
if self._ac["state"] == ADVANTAGE_AIR_STATE_OFF:
return HVACAction.OFF
if self._ac["mode"] == "myauto":
return HVAC_ACTIONS.get(
self._ac.get(ADVANTAGE_AIR_MYAUTO_MODE_SET, HVACAction.OFF)
)
return HVAC_ACTIONS.get(self._ac["mode"])
@property
def fan_mode(self) -> str | None:
"""Return the current fan modes."""
@ -273,6 +294,22 @@ class AdvantageAirZone(AdvantageAirZoneEntity, ClimateEntity):
return HVACMode.HEAT_COOL
return HVACMode.OFF
@property
def hvac_action(self) -> HVACAction | None:
"""Return the HVAC action, inheriting from master AC if zone is open but idle if air is <= 5%."""
if self._ac["state"] == ADVANTAGE_AIR_STATE_OFF:
return HVACAction.OFF
master_action = HVAC_ACTIONS.get(self._ac["mode"], HVACAction.OFF)
if self._ac["mode"] == "myauto":
master_action = HVAC_ACTIONS.get(
str(self._ac.get(ADVANTAGE_AIR_MYAUTO_MODE_SET)), HVACAction.OFF
)
if self._zone["state"] == ADVANTAGE_AIR_STATE_OPEN:
if self._zone["value"] <= Decimal(5):
return HVACAction.IDLE
return master_action
return HVACAction.OFF
@property
def current_temperature(self) -> float | None:
"""Return the current temperature."""

View File

@ -7,3 +7,4 @@ ADVANTAGE_AIR_STATE_CLOSE = "close"
ADVANTAGE_AIR_STATE_ON = "on"
ADVANTAGE_AIR_STATE_OFF = "off"
ADVANTAGE_AIR_AUTOFAN_ENABLED = "aaAutoFanModeEnabled"
ADVANTAGE_AIR_NIGHT_MODE_ENABLED = "quietNightModeEnabled"

View File

@ -41,7 +41,7 @@ async def async_setup_entry(
entities.append(
AdvantageAirThingCover(instance, thing, CoverDeviceClass.BLIND)
)
elif thing["channelDipState"] == 3: # 3 = "Garage door"
elif thing["channelDipState"] in [3, 10]: # 3 & 10 = "Garage door"
entities.append(
AdvantageAirThingCover(instance, thing, CoverDeviceClass.GARAGE)
)

View File

@ -9,6 +9,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import AdvantageAirDataConfigEntry
from .const import (
ADVANTAGE_AIR_AUTOFAN_ENABLED,
ADVANTAGE_AIR_NIGHT_MODE_ENABLED,
ADVANTAGE_AIR_STATE_OFF,
ADVANTAGE_AIR_STATE_ON,
)
@ -32,6 +33,8 @@ async def async_setup_entry(
entities.append(AdvantageAirFreshAir(instance, ac_key))
if ADVANTAGE_AIR_AUTOFAN_ENABLED in ac_device["info"]:
entities.append(AdvantageAirMyFan(instance, ac_key))
if ADVANTAGE_AIR_NIGHT_MODE_ENABLED in ac_device["info"]:
entities.append(AdvantageAirNightMode(instance, ac_key))
if things := instance.coordinator.data.get("myThings"):
entities.extend(
AdvantageAirRelay(instance, thing)
@ -93,6 +96,32 @@ class AdvantageAirMyFan(AdvantageAirAcEntity, SwitchEntity):
await self.async_update_ac({ADVANTAGE_AIR_AUTOFAN_ENABLED: False})
class AdvantageAirNightMode(AdvantageAirAcEntity, SwitchEntity):
"""Representation of Advantage 'MySleep$aver' Mode control."""
_attr_icon = "mdi:weather-night"
_attr_name = "MySleep$aver"
_attr_device_class = SwitchDeviceClass.SWITCH
def __init__(self, instance: AdvantageAirData, ac_key: str) -> None:
"""Initialize an Advantage Air Night Mode control."""
super().__init__(instance, ac_key)
self._attr_unique_id += "-nightmode"
@property
def is_on(self) -> bool:
"""Return the Night Mode status."""
return self._ac[ADVANTAGE_AIR_NIGHT_MODE_ENABLED]
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn Night Mode on."""
await self.async_update_ac({ADVANTAGE_AIR_NIGHT_MODE_ENABLED: True})
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn Night Mode off."""
await self.async_update_ac({ADVANTAGE_AIR_NIGHT_MODE_ENABLED: False})
class AdvantageAirRelay(AdvantageAirThingEntity, SwitchEntity):
"""Representation of Advantage Air Thing."""

View File

@ -51,7 +51,7 @@
"issues": {
"deprecated_yaml_import_issue_cannot_connect": {
"title": "The {integration_title} YAML configuration import failed",
"description": "Configuring {integration_title} using YAML is being removed but there was an connection error importing your YAML configuration.\n\nEnsure connection to {integration_title} works and restart Home Assistant to try again or remove the {integration_title} YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually."
"description": "Configuring {integration_title} using YAML is being removed but there was a connection error importing your YAML configuration.\n\nEnsure connection to {integration_title} works and restart Home Assistant to try again or remove the {integration_title} YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually."
}
}
}

View File

@ -11,7 +11,7 @@
}
},
"discovery_confirm": {
"description": "Do you want to setup {model}?"
"description": "Do you want to set up {model}?"
}
},
"abort": {

View File

@ -105,7 +105,14 @@ class AirlyDataUpdateCoordinator(DataUpdateCoordinator[dict[str, str | float | i
try:
await measurements.update()
except (AirlyError, ClientConnectorError) as error:
raise UpdateFailed(error) from error
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="update_error",
translation_placeholders={
"entry": self.config_entry.title,
"error": repr(error),
},
) from error
_LOGGER.debug(
"Requests remaining: %s/%s",
@ -126,7 +133,11 @@ class AirlyDataUpdateCoordinator(DataUpdateCoordinator[dict[str, str | float | i
standards = measurements.current["standards"]
if index["description"] == NO_AIRLY_SENSORS:
raise UpdateFailed("Can't retrieve data: no Airly sensors in this area")
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="no_station",
translation_placeholders={"entry": self.config_entry.title},
)
for value in values:
data[value["name"]] = value["value"]
for standard in standards:

View File

@ -36,5 +36,13 @@
"name": "[%key:component::sensor::entity_component::carbon_monoxide::name%]"
}
}
},
"exceptions": {
"update_error": {
"message": "An error occurred while retrieving data from the Airly API for {entry}: {error}"
},
"no_station": {
"message": "An error occurred while retrieving data from the Airly API for {entry}: no measuring stations in this area"
}
}
}

View File

@ -8,7 +8,7 @@ from aiohttp import ClientSession
from aiohttp.client_exceptions import ClientConnectorError
from pyairnow import WebServiceAPI
from pyairnow.conv import aqi_to_concentration
from pyairnow.errors import AirNowError
from pyairnow.errors import AirNowError, InvalidJsonError
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
@ -79,7 +79,7 @@ class AirNowDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
distance=self.distance,
)
except (AirNowError, ClientConnectorError) as error:
except (AirNowError, ClientConnectorError, InvalidJsonError) as error:
raise UpdateFailed(error) from error
if not obs:

View File

@ -7,7 +7,7 @@
"api_key": "[%key:common::config_flow::data::api_key%]",
"latitude": "[%key:common::config_flow::data::latitude%]",
"longitude": "[%key:common::config_flow::data::longitude%]",
"radius": "Station Radius (miles; optional)"
"radius": "Station radius (miles; optional)"
}
}
},
@ -25,7 +25,7 @@
"step": {
"init": {
"data": {
"radius": "Station Radius (miles)"
"radius": "Station radius (miles)"
}
}
}

View File

@ -91,7 +91,7 @@
"name": "Hydrogen fluoride"
},
"health_index": {
"name": "Health Index"
"name": "Health index"
},
"absolute_humidity": {
"name": "Absolute humidity"
@ -112,10 +112,10 @@
"name": "Oxygen"
},
"performance_index": {
"name": "Performance Index"
"name": "Performance index"
},
"hydrogen_phosphide": {
"name": "Hydrogen Phosphide"
"name": "Hydrogen phosphide"
},
"relative_pressure": {
"name": "Relative pressure"
@ -127,22 +127,22 @@
"name": "Refrigerant"
},
"silicon_hydride": {
"name": "Silicon Hydride"
"name": "Silicon hydride"
},
"noise": {
"name": "Noise"
},
"maximum_noise": {
"name": "Noise (Maximum)"
"name": "Noise (maximum)"
},
"radon": {
"name": "Radon"
},
"industrial_volatile_organic_compounds": {
"name": "VOCs (Industrial)"
"name": "VOCs (industrial)"
},
"virus_index": {
"name": "Virus Index"
"name": "Virus index"
}
}
}

View File

@ -102,7 +102,8 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
device = await self._get_device_data(discovery_info)
except AirthingsDeviceUpdateError:
return self.async_abort(reason="cannot_connect")
except Exception: # noqa: BLE001
except Exception:
_LOGGER.exception("Unknown error occurred")
return self.async_abort(reason="unknown")
name = get_name(device)
@ -160,7 +161,8 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
device = await self._get_device_data(discovery_info)
except AirthingsDeviceUpdateError:
return self.async_abort(reason="cannot_connect")
except Exception: # noqa: BLE001
except Exception:
_LOGGER.exception("Unknown error occurred")
return self.async_abort(reason="unknown")
name = get_name(device)
self._discovered_devices[address] = Discovery(name, discovery_info, device)

View File

@ -32,7 +32,8 @@ class AirTouch5ConfigFlow(ConfigFlow, domain=DOMAIN):
client = Airtouch5SimpleClient(user_input[CONF_HOST])
try:
await client.test_connection()
except Exception: # noqa: BLE001
except Exception:
_LOGGER.exception("Unexpected exception")
errors = {"base": "cannot_connect"}
else:
await self.async_set_unique_id(user_input[CONF_HOST])

View File

@ -2,7 +2,7 @@
"config": {
"step": {
"geography_by_coords": {
"title": "Configure a Geography",
"title": "Configure a geography",
"description": "Use the AirVisual cloud API to monitor a latitude/longitude.",
"data": {
"api_key": "[%key:common::config_flow::data::api_key%]",
@ -56,12 +56,12 @@
"sensor": {
"pollutant_label": {
"state": {
"co": "Carbon Monoxide",
"n2": "Nitrogen Dioxide",
"co": "Carbon monoxide",
"n2": "Nitrogen dioxide",
"o3": "Ozone",
"p1": "PM10",
"p2": "PM2.5",
"s2": "Sulfur Dioxide"
"s2": "Sulfur dioxide"
}
},
"pollutant_level": {

View File

@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/airzone_cloud",
"iot_class": "cloud_push",
"loggers": ["aioairzone_cloud"],
"requirements": ["aioairzone-cloud==0.6.10"]
"requirements": ["aioairzone-cloud==0.6.11"]
}

View File

@ -1438,7 +1438,7 @@ class AlexaModeController(AlexaCapability):
# Fan preset_mode
if self.instance == f"{fan.DOMAIN}.{fan.ATTR_PRESET_MODE}":
mode = self.entity.attributes.get(fan.ATTR_PRESET_MODE, None)
if mode in self.entity.attributes.get(fan.ATTR_PRESET_MODES, None):
if mode in self.entity.attributes.get(fan.ATTR_PRESET_MODES, ()):
return f"{fan.ATTR_PRESET_MODE}.{mode}"
# Humidifier mode
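The swapped default above matters because membership against None raises at runtime, while membership against an empty tuple is simply false, so fans that expose no preset_modes attribute no longer crash this capability:

# "eco" in None  ->  TypeError: argument of type 'NoneType' is not iterable
assert ("eco" in ()) is False  # the new default degrades gracefully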

View File

@ -240,6 +240,7 @@ SENSOR_DESCRIPTIONS = (
suggested_display_precision=0,
entity_registry_enabled_default=False,
device_class=SensorDeviceClass.WIND_DIRECTION,
state_class=SensorStateClass.MEASUREMENT_ANGLE,
),
SensorEntityDescription(
key=TYPE_WINDGUSTMPH,

View File

@ -609,6 +609,7 @@ SENSOR_DESCRIPTIONS = (
translation_key="wind_direction",
native_unit_of_measurement=DEGREE,
device_class=SensorDeviceClass.WIND_DIRECTION,
state_class=SensorStateClass.MEASUREMENT_ANGLE,
),
SensorEntityDescription(
key=TYPE_WINDDIR_AVG10M,

View File

@ -8,7 +8,7 @@ from python_homeassistant_analytics import (
HomeassistantAnalyticsClient,
HomeassistantAnalyticsConnectionError,
)
from python_homeassistant_analytics.models import IntegrationType
from python_homeassistant_analytics.models import Environment, IntegrationType
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow
@ -81,7 +81,7 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN):
)
try:
addons = await client.get_addons()
integrations = await client.get_integrations()
integrations = await client.get_integrations(Environment.NEXT)
custom_integrations = await client.get_custom_integrations()
except HomeassistantAnalyticsConnectionError:
LOGGER.exception("Error connecting to Home Assistant analytics")
@ -165,7 +165,7 @@ class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlow):
)
try:
addons = await client.get_addons()
integrations = await client.get_integrations()
integrations = await client.get_integrations(Environment.NEXT)
custom_integrations = await client.get_custom_integrations()
except HomeassistantAnalyticsConnectionError:
LOGGER.exception("Error connecting to Home Assistant analytics")

View File

@ -7,6 +7,6 @@
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["androidtvremote2"],
"requirements": ["androidtvremote2==0.2.0"],
"requirements": ["androidtvremote2==0.2.1"],
"zeroconf": ["_androidtvremote2._tcp.local."]
}

View File

@ -2,6 +2,8 @@
from __future__ import annotations
import logging
from anova_wifi import AnovaApi, InvalidLogin
import voluptuous as vol
@ -11,8 +13,10 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
class AnovaConfligFlow(ConfigFlow, domain=DOMAIN):
class AnovaConfigFlow(ConfigFlow, domain=DOMAIN):
"""Sets up a config flow for Anova."""
VERSION = 1
@ -35,7 +39,8 @@ class AnovaConfligFlow(ConfigFlow, domain=DOMAIN):
await api.authenticate()
except InvalidLogin:
errors["base"] = "invalid_auth"
except Exception: # noqa: BLE001
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self.async_create_entry(

View File

@ -22,6 +22,7 @@ from . import AnthemavConfigEntry
from .const import ANTHEMAV_UPDATE_SIGNAL, DOMAIN, MANUFACTURER
_LOGGER = logging.getLogger(__name__)
VOLUME_STEP = 0.01
async def async_setup_entry(
@ -60,6 +61,7 @@ class AnthemAVR(MediaPlayerEntity):
| MediaPlayerEntityFeature.TURN_OFF
| MediaPlayerEntityFeature.SELECT_SOURCE
)
_attr_volume_step = VOLUME_STEP
def __init__(
self,

View File

@ -12,7 +12,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv
from .const import DOMAIN, LOGGER
from .const import CONF_CHAT_MODEL, DOMAIN, LOGGER, RECOMMENDED_CHAT_MODEL
PLATFORMS = (Platform.CONVERSATION,)
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
@ -26,12 +26,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) ->
partial(anthropic.AsyncAnthropic, api_key=entry.data[CONF_API_KEY])
)
try:
await client.messages.create(
model="claude-3-haiku-20240307",
max_tokens=1,
messages=[{"role": "user", "content": "Hi"}],
timeout=10.0,
)
model_id = entry.options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
model = await client.models.retrieve(model_id=model_id, timeout=10.0)
LOGGER.debug("Anthropic model: %s", model.display_name)
except anthropic.AuthenticationError as err:
LOGGER.error("Invalid API key: %s", err)
return False
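The setup check above replaces a throwaway one-token messages.create call with models.retrieve on the configured model, validating the API key and the model's continued availability in a single cheap request. The same check in isolation (a sketch; model_id is whatever the entry options hold):

import anthropic

async def check_credentials(client: anthropic.AsyncAnthropic, model_id: str) -> bool:
    """Return True when the key is valid and the model is still offered."""
    try:
        await client.models.retrieve(model_id=model_id, timeout=10.0)
    except anthropic.AuthenticationError:
        return False  # bad API key
    except anthropic.NotFoundError:
        return False  # model id withdrawn or mistyped
    return True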

View File

@ -34,10 +34,12 @@ from .const import (
CONF_PROMPT,
CONF_RECOMMENDED,
CONF_TEMPERATURE,
CONF_THINKING_BUDGET,
DOMAIN,
RECOMMENDED_CHAT_MODEL,
RECOMMENDED_MAX_TOKENS,
RECOMMENDED_TEMPERATURE,
RECOMMENDED_THINKING_BUDGET,
)
_LOGGER = logging.getLogger(__name__)
@ -63,12 +65,7 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
client = await hass.async_add_executor_job(
partial(anthropic.AsyncAnthropic, api_key=data[CONF_API_KEY])
)
await client.messages.create(
model="claude-3-haiku-20240307",
max_tokens=1,
messages=[{"role": "user", "content": "Hi"}],
timeout=10.0,
)
await client.models.list(timeout=10.0)
class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
@ -133,21 +130,29 @@ class AnthropicOptionsFlow(OptionsFlow):
) -> ConfigFlowResult:
"""Manage the options."""
options: dict[str, Any] | MappingProxyType[str, Any] = self.config_entry.options
errors: dict[str, str] = {}
if user_input is not None:
if user_input[CONF_RECOMMENDED] == self.last_rendered_recommended:
if user_input[CONF_LLM_HASS_API] == "none":
user_input.pop(CONF_LLM_HASS_API)
return self.async_create_entry(title="", data=user_input)
# Re-render the options again, now with the recommended options shown/hidden
self.last_rendered_recommended = user_input[CONF_RECOMMENDED]
if user_input.get(
CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET
) >= user_input.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS):
errors[CONF_THINKING_BUDGET] = "thinking_budget_too_large"
options = {
CONF_RECOMMENDED: user_input[CONF_RECOMMENDED],
CONF_PROMPT: user_input[CONF_PROMPT],
CONF_LLM_HASS_API: user_input[CONF_LLM_HASS_API],
}
if not errors:
return self.async_create_entry(title="", data=user_input)
else:
# Re-render the options again, now with the recommended options shown/hidden
self.last_rendered_recommended = user_input[CONF_RECOMMENDED]
options = {
CONF_RECOMMENDED: user_input[CONF_RECOMMENDED],
CONF_PROMPT: user_input[CONF_PROMPT],
CONF_LLM_HASS_API: user_input[CONF_LLM_HASS_API],
}
suggested_values = options.copy()
if not suggested_values.get(CONF_PROMPT):
@ -161,6 +166,7 @@ class AnthropicOptionsFlow(OptionsFlow):
return self.async_show_form(
step_id="init",
data_schema=schema,
errors=errors or None,
)
@ -210,6 +216,10 @@ def anthropic_config_option_schema(
CONF_TEMPERATURE,
default=RECOMMENDED_TEMPERATURE,
): NumberSelector(NumberSelectorConfig(min=0, max=1, step=0.05)),
vol.Optional(
CONF_THINKING_BUDGET,
default=RECOMMENDED_THINKING_BUDGET,
): int,
}
)
return schema

View File

@ -13,3 +13,8 @@ CONF_MAX_TOKENS = "max_tokens"
RECOMMENDED_MAX_TOKENS = 1024
CONF_TEMPERATURE = "temperature"
RECOMMENDED_TEMPERATURE = 1.0
CONF_THINKING_BUDGET = "thinking_budget"
RECOMMENDED_THINKING_BUDGET = 0
MIN_THINKING_BUDGET = 1024
THINKING_MODELS = ["claude-3-7-sonnet-20250219", "claude-3-7-sonnet-latest"]
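These constants meet in conversation.py below: extended thinking is requested only for models that support it and only once the configured budget reaches MIN_THINKING_BUDGET, so the recommended default of 0 leaves it disabled. Condensed:

MIN_THINKING_BUDGET = 1024
THINKING_MODELS = ["claude-3-7-sonnet-20250219", "claude-3-7-sonnet-latest"]

def thinking_enabled(model: str, thinking_budget: int) -> bool:
    """Mirror the guard used when building the messages.create() arguments."""
    return model in THINKING_MODELS and thinking_budget >= MIN_THINKING_BUDGET

assert not thinking_enabled("claude-3-7-sonnet-latest", 0)  # recommended default
assert thinking_enabled("claude-3-7-sonnet-latest", 1024)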

View File

@ -1,23 +1,32 @@
"""Conversation support for Anthropic."""
from collections.abc import AsyncGenerator, Callable
from collections.abc import AsyncGenerator, Callable, Iterable
import json
from typing import Any, Literal
from typing import Any, Literal, cast
import anthropic
from anthropic import AsyncStream
from anthropic._types import NOT_GIVEN
from anthropic.types import (
InputJSONDelta,
Message,
MessageParam,
MessageStreamEvent,
RawContentBlockDeltaEvent,
RawContentBlockStartEvent,
RawContentBlockStopEvent,
RawMessageStartEvent,
RawMessageStopEvent,
RedactedThinkingBlock,
RedactedThinkingBlockParam,
SignatureDelta,
TextBlock,
TextBlockParam,
TextDelta,
ThinkingBlock,
ThinkingBlockParam,
ThinkingConfigDisabledParam,
ThinkingConfigEnabledParam,
ThinkingDelta,
ToolParam,
ToolResultBlockParam,
ToolUseBlock,
@ -30,7 +39,7 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import chat_session, device_registry as dr, intent, llm
from homeassistant.helpers import device_registry as dr, intent, llm
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import AnthropicConfigEntry
@ -39,11 +48,15 @@ from .const import (
CONF_MAX_TOKENS,
CONF_PROMPT,
CONF_TEMPERATURE,
CONF_THINKING_BUDGET,
DOMAIN,
LOGGER,
MIN_THINKING_BUDGET,
RECOMMENDED_CHAT_MODEL,
RECOMMENDED_MAX_TOKENS,
RECOMMENDED_TEMPERATURE,
RECOMMENDED_THINKING_BUDGET,
THINKING_MODELS,
)
# Max number of back and forth with the LLM to generate a response
@ -71,73 +84,101 @@ def _format_tool(
)
def _message_convert(
message: Message,
) -> MessageParam:
"""Convert from class to TypedDict."""
param_content: list[TextBlockParam | ToolUseBlockParam] = []
def _convert_content(
chat_content: Iterable[conversation.Content],
) -> list[MessageParam]:
"""Transform HA chat_log content into Anthropic API format."""
messages: list[MessageParam] = []
for message_content in message.content:
if isinstance(message_content, TextBlock):
param_content.append(TextBlockParam(type="text", text=message_content.text))
elif isinstance(message_content, ToolUseBlock):
param_content.append(
ToolUseBlockParam(
type="tool_use",
id=message_content.id,
name=message_content.name,
input=message_content.input,
)
for content in chat_content:
if isinstance(content, conversation.ToolResultContent):
tool_result_block = ToolResultBlockParam(
type="tool_result",
tool_use_id=content.tool_call_id,
content=json.dumps(content.tool_result),
)
return MessageParam(role=message.role, content=param_content)
def _convert_content(chat_content: conversation.Content) -> MessageParam:
"""Create tool response content."""
if isinstance(chat_content, conversation.ToolResultContent):
return MessageParam(
role="user",
content=[
ToolResultBlockParam(
type="tool_result",
tool_use_id=chat_content.tool_call_id,
content=json.dumps(chat_content.tool_result),
)
],
)
if isinstance(chat_content, conversation.AssistantContent):
return MessageParam(
role="assistant",
content=[
TextBlockParam(type="text", text=chat_content.content or ""),
*[
ToolUseBlockParam(
type="tool_use",
id=tool_call.id,
name=tool_call.tool_name,
input=tool_call.tool_args,
if not messages or messages[-1]["role"] != "user":
messages.append(
MessageParam(
role="user",
content=[tool_result_block],
)
for tool_call in chat_content.tool_calls or ()
],
],
)
if isinstance(chat_content, conversation.UserContent):
return MessageParam(
role="user",
content=chat_content.content,
)
# Note: We don't pass SystemContent here as its passed to the API as the prompt
raise ValueError(f"Unexpected content type: {type(chat_content)}")
)
elif isinstance(messages[-1]["content"], str):
messages[-1]["content"] = [
TextBlockParam(type="text", text=messages[-1]["content"]),
tool_result_block,
]
else:
messages[-1]["content"].append(tool_result_block) # type: ignore[attr-defined]
elif isinstance(content, conversation.UserContent):
# Combine consequent user messages
if not messages or messages[-1]["role"] != "user":
messages.append(
MessageParam(
role="user",
content=content.content,
)
)
elif isinstance(messages[-1]["content"], str):
messages[-1]["content"] = [
TextBlockParam(type="text", text=messages[-1]["content"]),
TextBlockParam(type="text", text=content.content),
]
else:
messages[-1]["content"].append( # type: ignore[attr-defined]
TextBlockParam(type="text", text=content.content)
)
elif isinstance(content, conversation.AssistantContent):
# Combine consequent assistant messages
if not messages or messages[-1]["role"] != "assistant":
messages.append(
MessageParam(
role="assistant",
content=[],
)
)
if content.content:
messages[-1]["content"].append( # type: ignore[union-attr]
TextBlockParam(type="text", text=content.content)
)
if content.tool_calls:
messages[-1]["content"].extend( # type: ignore[union-attr]
[
ToolUseBlockParam(
type="tool_use",
id=tool_call.id,
name=tool_call.tool_name,
input=tool_call.tool_args,
)
for tool_call in content.tool_calls
]
)
else:
# Note: We don't pass SystemContent here as it's passed to the API as the prompt
raise TypeError(f"Unexpected content type: {type(content)}")
return messages
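The merging above exists because consecutive chat-log entries can share a role, while the comments in the hunk ("Combine consequent user messages") and the Messages API's alternating user/assistant turn structure both call for folding them into a single message holding a list of content blocks. A self-contained toy version of the rule:

def merge(entries: list[tuple[str, str]]) -> list[dict]:
    """Collapse consecutive same-role entries into one message of text blocks."""
    messages: list[dict] = []
    for role, text in entries:
        block = {"type": "text", "text": text}
        if not messages or messages[-1]["role"] != role:
            messages.append({"role": role, "content": [block]})
        else:
            messages[-1]["content"].append(block)
    return messages

assert len(merge([("user", "a"), ("user", "b"), ("assistant", "c")])) == 2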
async def _transform_stream(
result: AsyncStream[MessageStreamEvent],
messages: list[MessageParam],
) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
"""Transform the response stream into HA format.
A typical stream of responses might look something like the following:
- RawMessageStartEvent with no content
- RawContentBlockStartEvent with an empty ThinkingBlock (if extended thinking is enabled)
- RawContentBlockDeltaEvent with a ThinkingDelta
- RawContentBlockDeltaEvent with a ThinkingDelta
- RawContentBlockDeltaEvent with a ThinkingDelta
- ...
- RawContentBlockDeltaEvent with a SignatureDelta
- RawContentBlockStopEvent
- RawContentBlockStartEvent with a RedactedThinkingBlock (occasionally)
- RawContentBlockStopEvent (RedactedThinkingBlock does not have a delta)
- RawContentBlockStartEvent with an empty TextBlock
- RawContentBlockDeltaEvent with a TextDelta
- RawContentBlockDeltaEvent with a TextDelta
@ -151,44 +192,103 @@ async def _transform_stream(
- RawContentBlockStopEvent
- RawMessageDeltaEvent with a stop_reason='tool_use'
- RawMessageStopEvent(type='message_stop')
Each message could contain multiple blocks of the same type.
"""
if result is None:
raise TypeError("Expected a stream of messages")
current_tool_call: dict | None = None
current_message: MessageParam | None = None
current_block: (
TextBlockParam
| ToolUseBlockParam
| ThinkingBlockParam
| RedactedThinkingBlockParam
| None
) = None
current_tool_args: str
async for response in result:
LOGGER.debug("Received response: %s", response)
if isinstance(response, RawContentBlockStartEvent):
if isinstance(response, RawMessageStartEvent):
if response.message.role != "assistant":
raise ValueError("Unexpected message role")
current_message = MessageParam(role=response.message.role, content=[])
elif isinstance(response, RawContentBlockStartEvent):
if isinstance(response.content_block, ToolUseBlock):
current_tool_call = {
"id": response.content_block.id,
"name": response.content_block.name,
"input": "",
}
current_block = ToolUseBlockParam(
type="tool_use",
id=response.content_block.id,
name=response.content_block.name,
input="",
)
current_tool_args = ""
elif isinstance(response.content_block, TextBlock):
current_block = TextBlockParam(
type="text", text=response.content_block.text
)
yield {"role": "assistant"}
if response.content_block.text:
yield {"content": response.content_block.text}
elif isinstance(response.content_block, ThinkingBlock):
current_block = ThinkingBlockParam(
type="thinking",
thinking=response.content_block.thinking,
signature=response.content_block.signature,
)
elif isinstance(response.content_block, RedactedThinkingBlock):
current_block = RedactedThinkingBlockParam(
type="redacted_thinking", data=response.content_block.data
)
LOGGER.debug(
"Some of Claudes internal reasoning has been automatically "
"encrypted for safety reasons. This doesnt affect the quality of "
"responses"
)
elif isinstance(response, RawContentBlockDeltaEvent):
if current_block is None:
raise ValueError("Unexpected delta without a block")
if isinstance(response.delta, InputJSONDelta):
if current_tool_call is None:
raise ValueError("Unexpected delta without a tool call")
current_tool_call["input"] += response.delta.partial_json
current_tool_args += response.delta.partial_json
elif isinstance(response.delta, TextDelta):
LOGGER.debug("yielding delta: %s", response.delta.text)
text_block = cast(TextBlockParam, current_block)
text_block["text"] += response.delta.text
yield {"content": response.delta.text}
elif isinstance(response.delta, ThinkingDelta):
thinking_block = cast(ThinkingBlockParam, current_block)
thinking_block["thinking"] += response.delta.thinking
elif isinstance(response.delta, SignatureDelta):
thinking_block = cast(ThinkingBlockParam, current_block)
thinking_block["signature"] += response.delta.signature
elif isinstance(response, RawContentBlockStopEvent):
if current_tool_call:
if current_block is None:
raise ValueError("Unexpected stop event without a current block")
if current_block["type"] == "tool_use":
tool_block = cast(ToolUseBlockParam, current_block)
tool_args = json.loads(current_tool_args)
tool_block["input"] = tool_args
yield {
"tool_calls": [
llm.ToolInput(
id=current_tool_call["id"],
tool_name=current_tool_call["name"],
tool_args=json.loads(current_tool_call["input"]),
id=tool_block["id"],
tool_name=tool_block["name"],
tool_args=tool_args,
)
]
}
current_tool_call = None
elif current_block["type"] == "thinking":
thinking_block = cast(ThinkingBlockParam, current_block)
LOGGER.debug("Thinking: %s", thinking_block["thinking"])
if current_message is None:
raise ValueError("Unexpected stop event without a current message")
current_message["content"].append(current_block) # type: ignore[union-attr]
current_block = None
elif isinstance(response, RawMessageStopEvent):
if current_message is not None:
messages.append(current_message)
current_message = None
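A detail worth pulling out of the stream handler above: tool arguments arrive as InputJSONDelta string fragments, so they accumulate in current_tool_args and are parsed only when the content block stops. In miniature:

import json

fragments = ['{"brightness"', ": 128}"]  # partial_json chunks, in arrival order
assert json.loads("".join(fragments)) == {"brightness": 128}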
class AnthropicConversationEntity(
@ -226,18 +326,6 @@ class AnthropicConversationEntity(
self.entry.add_update_listener(self._async_entry_update_listener)
)
async def async_process(
self, user_input: conversation.ConversationInput
) -> conversation.ConversationResult:
"""Process a sentence."""
with (
chat_session.async_get_chat_session(
self.hass, user_input.conversation_id
) as session,
conversation.async_get_chat_log(self.hass, session, user_input) as chat_log,
):
return await self._async_handle_message(user_input, chat_log)
async def _async_handle_message(
self,
user_input: conversation.ConversationInput,
@ -266,34 +354,50 @@ class AnthropicConversationEntity(
system = chat_log.content[0]
if not isinstance(system, conversation.SystemContent):
raise TypeError("First message must be a system message")
messages = [_convert_content(content) for content in chat_log.content[1:]]
messages = _convert_content(chat_log.content[1:])
client = self.entry.runtime_data
thinking_budget = options.get(CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET)
model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
# To prevent infinite loops, we limit the number of iterations
for _iteration in range(MAX_TOOL_ITERATIONS):
try:
stream = await client.messages.create(
model=options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL),
messages=messages,
tools=tools or NOT_GIVEN,
max_tokens=options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
system=system.content,
temperature=options.get(CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE),
stream=True,
model_args = {
"model": model,
"messages": messages,
"tools": tools or NOT_GIVEN,
"max_tokens": options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
"system": system.content,
"stream": True,
}
if model in THINKING_MODELS and thinking_budget >= MIN_THINKING_BUDGET:
model_args["thinking"] = ThinkingConfigEnabledParam(
type="enabled", budget_tokens=thinking_budget
)
else:
model_args["thinking"] = ThinkingConfigDisabledParam(type="disabled")
model_args["temperature"] = options.get(
CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
)
try:
stream = await client.messages.create(**model_args)
except anthropic.AnthropicError as err:
raise HomeAssistantError(
f"Sorry, I had a problem talking to Anthropic: {err}"
) from err
messages.extend(
[
_convert_content(content)
async for content in chat_log.async_add_delta_content_stream(
user_input.agent_id, _transform_stream(stream)
)
]
_convert_content(
[
content
async for content in chat_log.async_add_delta_content_stream(
user_input.agent_id, _transform_stream(stream, messages)
)
if not isinstance(content, conversation.AssistantContent)
]
)
)
if not chat_log.unresponded_tool_results:
@ -305,7 +409,9 @@ class AnthropicConversationEntity(
intent_response = intent.IntentResponse(language=user_input.language)
intent_response.async_set_speech(response_content.content or "")
return conversation.ConversationResult(
response=intent_response, conversation_id=chat_log.conversation_id
response=intent_response,
conversation_id=chat_log.conversation_id,
continue_conversation=chat_log.continue_conversation,
)
async def _async_entry_update_listener(

View File

@ -23,12 +23,17 @@
"max_tokens": "Maximum tokens to return in response",
"temperature": "Temperature",
"llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]",
"recommended": "Recommended model settings"
"recommended": "Recommended model settings",
"thinking_budget_tokens": "Thinking budget"
},
"data_description": {
"prompt": "Instruct how the LLM should respond. This can be a template."
"prompt": "Instruct how the LLM should respond. This can be a template.",
"thinking_budget_tokens": "The number of tokens the model can use to think about the response out of the total maximum number of tokens. Set to 1024 or greater to enable extended thinking."
}
}
},
"error": {
"thinking_budget_too_large": "Maximum tokens must be greater than the thinking budget."
}
}
}

View File

@ -57,7 +57,7 @@
"name": "Status date"
},
"dip_switch_settings": {
"name": "Dip switch settings"
"name": "DIP switch settings"
},
"low_battery_signal": {
"name": "Low battery signal"

View File

@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["pyaprilaire"],
"requirements": ["pyaprilaire==0.7.7"]
"requirements": ["pyaprilaire==0.8.1"]
}

View File

@ -60,7 +60,7 @@ class AquaCellConfigFlow(ConfigFlow, domain=DOMAIN):
errors["base"] = "cannot_connect"
except AuthenticationFailed:
errors["base"] = "invalid_auth"
except Exception: # pylint: disable=broad-except
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:

View File

@ -6,7 +6,11 @@ import logging
from typing import Any
from homeassistant.components import mqtt
from homeassistant.components.sensor import SensorDeviceClass, SensorEntity
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorStateClass,
)
from homeassistant.const import DEGREE, UnitOfPrecipitationDepth, UnitOfTemperature
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback
@ -98,6 +102,7 @@ def discover_sensors(topic: str, payload: dict[str, Any]) -> list[ArwnSensor] |
DEGREE,
"mdi:compass",
device_class=SensorDeviceClass.WIND_DIRECTION,
state_class=SensorStateClass.MEASUREMENT_ANGLE,
),
]
return None
@ -178,6 +183,7 @@ class ArwnSensor(SensorEntity):
units: str,
icon: str | None = None,
device_class: SensorDeviceClass | None = None,
state_class: SensorStateClass | None = None,
) -> None:
"""Initialize the sensor."""
self.entity_id = _slug(name)
@ -188,6 +194,7 @@ class ArwnSensor(SensorEntity):
self._attr_native_unit_of_measurement = units
self._attr_icon = icon
self._attr_device_class = device_class
self._attr_state_class = state_class
def set_event(self, event: dict[str, Any]) -> None:
"""Update the sensor with the most recent event."""

View File

@ -117,7 +117,7 @@ async def async_pipeline_from_audio_stream(
"""
with chat_session.async_get_chat_session(hass, conversation_id) as session:
pipeline_input = PipelineInput(
conversation_id=session.conversation_id,
session=session,
device_id=device_id,
stt_metadata=stt_metadata,
stt_stream=stt_stream,

View File

@ -19,14 +19,7 @@ import wave
import hass_nabucasa
import voluptuous as vol
from homeassistant.components import (
conversation,
media_source,
stt,
tts,
wake_word,
websocket_api,
)
from homeassistant.components import conversation, stt, tts, wake_word, websocket_api
from homeassistant.components.tts import (
generate_media_source_id as tts_generate_media_source_id,
)
@ -96,6 +89,9 @@ ENGINE_LANGUAGE_PAIRS = (
)
KEY_ASSIST_PIPELINE: HassKey[PipelineData] = HassKey(DOMAIN)
KEY_PIPELINE_CONVERSATION_DATA: HassKey[dict[str, PipelineConversationData]] = HassKey(
"pipeline_conversation_data"
)
def validate_language(data: dict[str, Any]) -> Any:
@ -129,7 +125,7 @@ SAVE_DELAY = 10
@callback
def _async_local_fallback_intent_filter(result: RecognizeResult) -> bool:
"""Filter out intents that are not local fallback."""
return result.intent.name in (intent.INTENT_GET_STATE, intent.INTENT_NEVERMIND)
return result.intent.name in (intent.INTENT_GET_STATE,)
@callback
@ -566,8 +562,7 @@ class PipelineRun:
id: str = field(default_factory=ulid_util.ulid_now)
stt_provider: stt.SpeechToTextEntity | stt.Provider = field(init=False, repr=False)
tts_engine: str = field(init=False, repr=False)
tts_options: dict | None = field(init=False, default=None)
tts_stream: tts.ResultStream | None = field(init=False, default=None)
wake_word_entity_id: str | None = field(init=False, default=None, repr=False)
wake_word_entity: wake_word.WakeWordDetectionEntity = field(init=False, repr=False)
@ -590,6 +585,12 @@ class PipelineRun:
_device_id: str | None = None
"""Optional device id set during run start."""
_conversation_data: PipelineConversationData | None = None
"""Data tied to the conversation ID."""
_intent_agent_only = False
"""If request should only be handled by agent, ignoring sentence triggers and local processing."""
def __post_init__(self) -> None:
"""Set language for pipeline."""
self.language = self.pipeline.language or self.hass.config.language
@ -639,13 +640,19 @@ class PipelineRun:
self._device_id = device_id
self._start_debug_recording_thread()
data = {
data: dict[str, Any] = {
"pipeline": self.pipeline.id,
"language": self.language,
"conversation_id": conversation_id,
}
if self.runner_data is not None:
data["runner_data"] = self.runner_data
if self.tts_stream:
data["tts_output"] = {
"token": self.tts_stream.token,
"url": self.tts_stream.url,
"mime_type": self.tts_stream.content_type,
}
self.process_event(PipelineEvent(PipelineEventType.RUN_START, data))
@ -1007,19 +1014,36 @@ class PipelineRun:
yield chunk.audio
async def prepare_recognize_intent(self) -> None:
async def prepare_recognize_intent(self, session: chat_session.ChatSession) -> None:
"""Prepare recognizing an intent."""
agent_info = conversation.async_get_agent_info(
self.hass,
self.pipeline.conversation_engine or conversation.HOME_ASSISTANT_AGENT,
self._conversation_data = async_get_pipeline_conversation_data(
self.hass, session
)
if agent_info is None:
engine = self.pipeline.conversation_engine or "default"
raise IntentRecognitionError(
code="intent-not-supported",
message=f"Intent recognition engine {engine} is not found",
if self._conversation_data.continue_conversation_agent is not None:
agent_info = conversation.async_get_agent_info(
self.hass, self._conversation_data.continue_conversation_agent
)
self._conversation_data.continue_conversation_agent = None
if agent_info is None:
raise IntentRecognitionError(
code="intent-agent-not-found",
message=f"Intent recognition engine {self._conversation_data.continue_conversation_agent} asked for follow-up but is no longer found",
)
self._intent_agent_only = True
else:
agent_info = conversation.async_get_agent_info(
self.hass,
self.pipeline.conversation_engine or conversation.HOME_ASSISTANT_AGENT,
)
if agent_info is None:
engine = self.pipeline.conversation_engine or "default"
raise IntentRecognitionError(
code="intent-not-supported",
message=f"Intent recognition engine {engine} is not found",
)
self.intent_agent = agent_info.id
@ -1031,7 +1055,7 @@ class PipelineRun:
conversation_extra_system_prompt: str | None,
) -> str:
"""Run intent recognition portion of pipeline. Returns text to speak."""
if self.intent_agent is None:
if self.intent_agent is None or self._conversation_data is None:
raise RuntimeError("Recognize intent was not prepared")
if self.pipeline.conversation_language == MATCH_ALL:
@ -1078,7 +1102,7 @@ class PipelineRun:
agent_id = self.intent_agent
processed_locally = agent_id == conversation.HOME_ASSISTANT_AGENT
intent_response: intent.IntentResponse | None = None
if not processed_locally:
if not processed_locally and not self._intent_agent_only:
# Sentence triggers override conversation agent
if (
trigger_response_text
@ -1195,6 +1219,9 @@ class PipelineRun:
)
)
if conversation_result.continue_conversation:
self._conversation_data.continue_conversation_agent = agent_id
return speech
async def prepare_text_to_speech(self) -> None:
@ -1217,36 +1244,31 @@ class PipelineRun:
tts_options[tts.ATTR_PREFERRED_SAMPLE_BYTES] = SAMPLE_WIDTH
try:
options_supported = await tts.async_support_options(
self.hass,
engine,
self.pipeline.tts_language,
tts_options,
self.tts_stream = tts.async_create_stream(
hass=self.hass,
engine=engine,
language=self.pipeline.tts_language,
options=tts_options,
)
except HomeAssistantError as err:
raise TextToSpeechError(
code="tts-not-supported",
message=f"Text-to-speech engine '{engine}' not found",
) from err
if not options_supported:
raise TextToSpeechError(
code="tts-not-supported",
message=(
f"Text-to-speech engine {engine} "
f"does not support language {self.pipeline.tts_language} or options {tts_options}"
f"does not support language {self.pipeline.tts_language} or options {tts_options}:"
f" {err}"
),
)
self.tts_engine = engine
self.tts_options = tts_options
) from err
async def text_to_speech(self, tts_input: str) -> None:
"""Run text-to-speech portion of pipeline."""
assert self.tts_stream is not None
self.process_event(
PipelineEvent(
PipelineEventType.TTS_START,
{
"engine": self.tts_engine,
"engine": self.tts_stream.engine,
"language": self.pipeline.tts_language,
"voice": self.pipeline.tts_voice,
"tts_input": tts_input,
@ -1259,14 +1281,9 @@ class PipelineRun:
tts_media_id = tts_generate_media_source_id(
self.hass,
tts_input,
engine=self.tts_engine,
language=self.pipeline.tts_language,
options=self.tts_options,
)
tts_media = await media_source.async_resolve_media(
self.hass,
tts_media_id,
None,
engine=self.tts_stream.engine,
language=self.tts_stream.language,
options=self.tts_stream.options,
)
except Exception as src_error:
_LOGGER.exception("Unexpected error during text-to-speech")
@ -1275,10 +1292,13 @@ class PipelineRun:
message="Unexpected error during text-to-speech",
) from src_error
_LOGGER.debug("TTS result %s", tts_media)
self.tts_stream.async_set_message(tts_input)
tts_output = {
"media_id": tts_media_id,
**asdict(tts_media),
"token": self.tts_stream.token,
"url": self.tts_stream.url,
"mime_type": self.tts_stream.content_type,
}
self.process_event(
@ -1458,8 +1478,8 @@ class PipelineInput:
run: PipelineRun
conversation_id: str
"""Identifier for the conversation."""
session: chat_session.ChatSession
"""Session for the conversation."""
stt_metadata: stt.SpeechMetadata | None = None
"""Metadata of stt input audio. Required when start_stage = stt."""
@ -1484,7 +1504,9 @@ class PipelineInput:
async def execute(self) -> None:
"""Run pipeline."""
self.run.start(conversation_id=self.conversation_id, device_id=self.device_id)
self.run.start(
conversation_id=self.session.conversation_id, device_id=self.device_id
)
current_stage: PipelineStage | None = self.run.start_stage
stt_audio_buffer: list[EnhancedAudioChunk] = []
stt_processed_stream: AsyncIterable[EnhancedAudioChunk] | None = None
@ -1568,7 +1590,7 @@ class PipelineInput:
assert intent_input is not None
tts_input = await self.run.recognize_intent(
intent_input,
self.conversation_id,
self.session.conversation_id,
self.device_id,
self.conversation_extra_system_prompt,
)
@ -1652,7 +1674,7 @@ class PipelineInput:
<= PIPELINE_STAGE_ORDER.index(PipelineStage.INTENT)
<= end_stage_index
):
prepare_tasks.append(self.run.prepare_recognize_intent())
prepare_tasks.append(self.run.prepare_recognize_intent(self.session))
if (
start_stage_index
@ -1931,7 +1953,7 @@ class PipelineRunDebug:
class PipelineStore(Store[SerializedPipelineStorageCollection]):
"""Store entity registry data."""
"""Store pipeline data."""
async def _async_migrate_func(
self,
@ -2013,3 +2035,37 @@ async def async_run_migrations(hass: HomeAssistant) -> None:
for pipeline, attr_updates in updates:
await async_update_pipeline(hass, pipeline, **attr_updates)
@dataclass
class PipelineConversationData:
"""Hold data for the duration of a conversation."""
continue_conversation_agent: str | None = None
"""The agent that requested the conversation to be continued."""
@callback
def async_get_pipeline_conversation_data(
hass: HomeAssistant, session: chat_session.ChatSession
) -> PipelineConversationData:
"""Get the pipeline data for a specific conversation."""
all_conversation_data = hass.data.get(KEY_PIPELINE_CONVERSATION_DATA)
if all_conversation_data is None:
all_conversation_data = {}
hass.data[KEY_PIPELINE_CONVERSATION_DATA] = all_conversation_data
data = all_conversation_data.get(session.conversation_id)
if data is not None:
return data
@callback
def do_cleanup() -> None:
"""Handle cleanup."""
all_conversation_data.pop(session.conversation_id)
session.async_on_cleanup(do_cleanup)
data = all_conversation_data[session.conversation_id] = PipelineConversationData()
return data
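A short usage sketch tying this helper back to PipelineRun above: when an agent asks to continue the conversation, its id is stored on the session-scoped record, the next prepare_recognize_intent for the same session routes straight back to it, and session cleanup drops the record automatically.

from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import chat_session

@callback
def remember_follow_up(
    hass: HomeAssistant, session: chat_session.ChatSession, agent_id: str
) -> None:
    """Route the next turn of this session back to the agent requesting it."""
    data = async_get_pipeline_conversation_data(hass, session)
    data.continue_conversation_agent = agent_id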

View File

@ -239,7 +239,7 @@ async def websocket_run(
with chat_session.async_get_chat_session(
hass, msg.get("conversation_id")
) as session:
input_args["conversation_id"] = session.conversation_id
input_args["session"] = session
pipeline_input = PipelineInput(**input_args)
try:

View File

@ -1,9 +1,11 @@
"""Base class for assist satellite entities."""
import logging
from pathlib import Path
import voluptuous as vol
from homeassistant.components.http import StaticPathConfig
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
@ -15,6 +17,8 @@ from .const import (
CONNECTION_TEST_DATA,
DATA_COMPONENT,
DOMAIN,
PREANNOUNCE_FILENAME,
PREANNOUNCE_URL,
AssistSatelliteEntityFeature,
)
from .entity import (
@ -56,6 +60,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
{
vol.Optional("message"): str,
vol.Optional("media_id"): str,
vol.Optional("preannounce"): bool,
vol.Optional("preannounce_media_id"): str,
}
),
cv.has_at_least_one_key("message", "media_id"),
@ -70,6 +76,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
{
vol.Optional("start_message"): str,
vol.Optional("start_media_id"): str,
vol.Optional("preannounce"): bool,
vol.Optional("preannounce_media_id"): str,
vol.Optional("extra_system_prompt"): str,
}
),
@ -82,6 +90,15 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async_register_websocket_api(hass)
hass.http.register_view(ConnectionTestView())
# Default preannounce sound
await hass.http.async_register_static_paths(
[
StaticPathConfig(
PREANNOUNCE_URL, str(Path(__file__).parent / PREANNOUNCE_FILENAME)
)
]
)
return True
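With the two optional fields registered above, a service call can now suppress the chime or point at custom audio. A hedged example (the satellite entity id is hypothetical; both new fields show their defaults):

from homeassistant.components.assist_satellite.const import PREANNOUNCE_URL
from homeassistant.core import HomeAssistant

async def announce_dinner(hass: HomeAssistant) -> None:
    """Call assist_satellite.announce with the new preannounce options."""
    await hass.services.async_call(
        "assist_satellite",
        "announce",
        {
            "entity_id": "assist_satellite.kitchen",  # hypothetical entity
            "message": "Dinner is ready",
            "preannounce": True,                      # default: play a sound first
            "preannounce_media_id": PREANNOUNCE_URL,  # default chime; override to customize
        },
        blocking=True,
    )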

View File

@ -20,6 +20,9 @@ CONNECTION_TEST_DATA: HassKey[dict[str, asyncio.Event]] = HassKey(
f"{DOMAIN}_connection_tests"
)
PREANNOUNCE_FILENAME = "preannounce.mp3"
PREANNOUNCE_URL = f"/api/assist_satellite/static/{PREANNOUNCE_FILENAME}"
class AssistSatelliteEntityFeature(IntFlag):
"""Supported features of Assist satellite entity."""

View File

@ -23,15 +23,12 @@ from homeassistant.components.assist_pipeline import (
vad,
)
from homeassistant.components.media_player import async_process_play_media_url
from homeassistant.components.tts import (
generate_media_source_id as tts_generate_media_source_id,
)
from homeassistant.core import Context, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import chat_session, entity
from homeassistant.helpers.entity import EntityDescription
from .const import AssistSatelliteEntityFeature
from .const import PREANNOUNCE_URL, AssistSatelliteEntityFeature
from .errors import AssistSatelliteError, SatelliteBusyError
_LOGGER = logging.getLogger(__name__)
@ -98,9 +95,15 @@ class AssistSatelliteAnnouncement:
original_media_id: str
"""The raw media ID before processing."""
tts_token: str | None
"""The TTS token of the media."""
media_id_source: Literal["url", "media_id", "tts"]
"""Source of the media ID."""
preannounce_media_id: str | None = None
"""Media ID to be played before announcement."""
class AssistSatelliteEntity(entity.Entity):
"""Entity encapsulating the state and functionality of an Assist satellite."""
@ -177,6 +180,8 @@ class AssistSatelliteEntity(entity.Entity):
self,
message: str | None = None,
media_id: str | None = None,
preannounce: bool = True,
preannounce_media_id: str = PREANNOUNCE_URL,
) -> None:
"""Play and show an announcement on the satellite.
@ -186,6 +191,9 @@ class AssistSatelliteEntity(entity.Entity):
If media_id is provided, it is played directly. It is possible
to omit the message and the satellite will not show any text.
If preannounce is True, a sound is played before the announcement.
If preannounce_media_id is provided, it overrides the default sound.
Calls async_announce with message and media id.
"""
await self._cancel_running_pipeline()
@ -193,7 +201,11 @@ class AssistSatelliteEntity(entity.Entity):
if message is None:
message = ""
announcement = await self._resolve_announcement_media_id(message, media_id)
announcement = await self._resolve_announcement_media_id(
message,
media_id,
preannounce_media_id=preannounce_media_id if preannounce else None,
)
if self._is_announcing:
raise SatelliteBusyError
@ -220,6 +232,8 @@ class AssistSatelliteEntity(entity.Entity):
start_message: str | None = None,
start_media_id: str | None = None,
extra_system_prompt: str | None = None,
preannounce: bool = True,
preannounce_media_id: str = PREANNOUNCE_URL,
) -> None:
"""Start a conversation from the satellite.
@ -229,6 +243,9 @@ class AssistSatelliteEntity(entity.Entity):
If start_media_id is provided, it is played directly. It is possible
to omit the message and the satellite will not show any text.
If preannounce is True, a sound is played before the start message or media.
If preannounce_media_id is provided, it overrides the default sound.
Calls async_start_conversation.
"""
await self._cancel_running_pipeline()
@ -244,13 +261,17 @@ class AssistSatelliteEntity(entity.Entity):
start_message = ""
announcement = await self._resolve_announcement_media_id(
start_message, start_media_id
start_message,
start_media_id,
preannounce_media_id=preannounce_media_id if preannounce else None,
)
if self._is_announcing:
raise SatelliteBusyError
self._is_announcing = True
self._set_state(AssistSatelliteState.RESPONDING)
# Provide our start info to the LLM so it understands the context of the incoming message
if extra_system_prompt is not None:
self._extra_system_prompt = extra_system_prompt
@ -280,6 +301,7 @@ class AssistSatelliteEntity(entity.Entity):
raise
finally:
self._is_announcing = False
self._set_state(AssistSatelliteState.IDLE)
async def async_start_conversation(
self, start_announcement: AssistSatelliteAnnouncement
@ -470,20 +492,27 @@ class AssistSatelliteEntity(entity.Entity):
return vad.VadSensitivity.to_seconds(vad_sensitivity)
async def _resolve_announcement_media_id(
self, message: str, media_id: str | None
self,
message: str,
media_id: str | None,
preannounce_media_id: str | None = None,
) -> AssistSatelliteAnnouncement:
"""Resolve the media ID."""
media_id_source: Literal["url", "media_id", "tts"] | None = None
tts_token: str | None = None
if media_id:
original_media_id = media_id
else:
media_id_source = "tts"
# Synthesize audio and get URL
pipeline_id = self._resolve_pipeline()
pipeline = async_get_pipeline(self.hass, pipeline_id)
engine = tts.async_resolve_engine(self.hass, pipeline.tts_engine)
if engine is None:
raise HomeAssistantError(f"TTS engine {pipeline.tts_engine} not found")
tts_options: dict[str, Any] = {}
if pipeline.tts_voice is not None:
tts_options[tts.ATTR_VOICE] = pipeline.tts_voice
@ -491,14 +520,23 @@ class AssistSatelliteEntity(entity.Entity):
if self.tts_options is not None:
tts_options.update(self.tts_options)
media_id = tts_generate_media_source_id(
stream = tts.async_create_stream(
self.hass,
message,
engine=pipeline.tts_engine,
engine=engine,
language=pipeline.tts_language,
options=tts_options,
)
stream.async_set_message(message)
tts_token = stream.token
media_id = stream.url
original_media_id = tts.generate_media_source_id(
self.hass,
message,
engine=engine,
language=pipeline.tts_language,
options=tts_options,
)
original_media_id = media_id
if media_source.is_media_source_id(media_id):
if not media_id_source:
@ -516,6 +554,26 @@ class AssistSatelliteEntity(entity.Entity):
# Resolve to full URL
media_id = async_process_play_media_url(self.hass, media_id)
# Resolve preannounce media id
if preannounce_media_id:
if media_source.is_media_source_id(preannounce_media_id):
preannounce_media = await media_source.async_resolve_media(
self.hass,
preannounce_media_id,
None,
)
preannounce_media_id = preannounce_media.url
# Resolve to full URL
preannounce_media_id = async_process_play_media_url(
self.hass, preannounce_media_id
)
return AssistSatelliteAnnouncement(
message, media_id, original_media_id, media_id_source
message=message,
media_id=media_id,
original_media_id=original_media_id,
tts_token=tts_token,
media_id_source=media_id_source,
preannounce_media_id=preannounce_media_id,
)
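
Satellite implementations receive the resolved announcement object, and the new preannounce_media_id field is meant to be played before the announcement itself. A hedged sketch of a concrete entity honoring it; ExampleSatellite and _play_url are illustrative helpers, not part of this commit:

from homeassistant.components.assist_satellite import (
    AssistSatelliteAnnouncement,
    AssistSatelliteEntity,
)


class ExampleSatellite(AssistSatelliteEntity):
    """Illustrative satellite entity."""

    async def async_announce(
        self, announcement: AssistSatelliteAnnouncement
    ) -> None:
        """Play the optional preannounce sound, then the announcement."""
        if announcement.preannounce_media_id is not None:
            await self._play_url(announcement.preannounce_media_id)
        await self._play_url(announcement.media_id)

    async def _play_url(self, url: str) -> None:
        """Send a resolved media URL to the device (assumed transport)."""
        raise NotImplementedError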

View File

@ -8,12 +8,22 @@ announce:
message:
required: false
example: "Time to wake up!"
default: ""
selector:
text:
media_id:
required: false
selector:
text:
preannounce:
required: false
default: true
selector:
boolean:
preannounce_media_id:
required: false
selector:
text:
start_conversation:
target:
entity:
@ -24,6 +34,7 @@ start_conversation:
start_message:
required: false
example: "You left the lights on in the living room. Turn them off?"
default: ""
selector:
text:
start_media_id:
@ -34,3 +45,12 @@ start_conversation:
required: false
selector:
text:
preannounce:
required: false
default: true
selector:
boolean:
preannounce_media_id:
required: false
selector:
text:
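
For reference, a hedged sketch of calling the extended announce schema from Python rather than YAML; the entity ID and the override media path are hypothetical:

from homeassistant.core import HomeAssistant


async def wake_the_kitchen(hass: HomeAssistant) -> None:
    """Announce with a custom preannounce sound (illustrative values)."""
    await hass.services.async_call(
        "assist_satellite",
        "announce",
        {
            "message": "Time to wake up!",
            "preannounce": True,
            # Optional override of the bundled preannounce.mp3:
            "preannounce_media_id": "media-source://media_source/local/chime.mp3",
        },
        blocking=True,
        target={"entity_id": "assist_satellite.kitchen"},
    )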

View File

@ -23,6 +23,14 @@
"media_id": {
"name": "Media ID",
"description": "The media ID to announce instead of using text-to-speech."
},
"preannounce": {
"name": "Preannounce",
"description": "Play a sound before the announcement."
},
"preannounce_media_id": {
"name": "Preannounce media ID",
"description": "Custom media ID to play before the announcement."
}
}
},
@ -41,6 +49,14 @@
"extra_system_prompt": {
"name": "Extra system prompt",
"description": "Provide background information to the AI about the request."
},
"preannounce": {
"name": "Preannounce",
"description": "Play a sound before the start message or media."
},
"preannounce_media_id": {
"name": "Preannounce media ID",
"description": "Custom media ID to play before the start message or media."
}
}
}

View File

@ -198,7 +198,8 @@ async def websocket_test_connection(
hass.async_create_background_task(
satellite.async_internal_announce(
media_id=f"{CONNECTION_TEST_URL_BASE}/{connection_id}"
media_id=f"{CONNECTION_TEST_URL_BASE}/{connection_id}",
preannounce=False,
),
f"assist_satellite_connection_test_{msg['entity_id']}",
)

View File

@ -66,28 +66,28 @@
"name": "Upload"
},
"load_avg_1m": {
"name": "Average load (1m)"
"name": "Average load (1 min)"
},
"load_avg_5m": {
"name": "Average load (5m)"
"name": "Average load (5 min)"
},
"load_avg_15m": {
"name": "Average load (15m)"
"name": "Average load (15 min)"
},
"24ghz_temperature": {
"name": "2.4GHz Temperature"
"name": "2.4GHz temperature"
},
"5ghz_temperature": {
"name": "5GHz Temperature"
"name": "5GHz temperature"
},
"cpu_temperature": {
"name": "CPU Temperature"
"name": "CPU temperature"
},
"5ghz_2_temperature": {
"name": "5GHz Temperature (Radio 2)"
"name": "5GHz temperature (Radio 2)"
},
"6ghz_temperature": {
"name": "6GHz Temperature"
"name": "6GHz temperature"
},
"cpu_usage": {
"name": "CPU usage"

View File

@ -14,7 +14,7 @@
"personal_access_token": "Personal Access Token (PAT)"
},
"description": "Set up an Azure DevOps instance to access your project. A Personal Access Token is only required for a private project.",
"title": "Add Azure DevOps Project"
"title": "Add Azure DevOps project"
},
"reauth_confirm": {
"data": {
@ -32,7 +32,7 @@
"entity": {
"sensor": {
"build_id": {
"name": "{definition_name} latest build id"
"name": "{definition_name} latest build ID"
},
"finish_time": {
"name": "{definition_name} latest build finish time"
@ -59,7 +59,7 @@
"name": "{definition_name} latest build start time"
},
"url": {
"name": "{definition_name} latest build url"
"name": "{definition_name} latest build URL"
},
"work_item_count": {
"name": "{item_type} {item_state} work items"
@ -68,7 +68,7 @@
},
"exceptions": {
"authentication_failed": {
"message": "Could not authorize with Azure DevOps for {title}. You will need to update your personal access token."
"message": "Could not authorize with Azure DevOps for {title}. You will need to update your Personal Access Token."
}
}
}

View File

@ -13,7 +13,11 @@ from azure.storage.blob.aio import ContainerClient
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
from homeassistant.exceptions import (
ConfigEntryAuthFailed,
ConfigEntryError,
ConfigEntryNotReady,
)
from homeassistant.helpers.aiohttp_client import async_create_clientsession
from .const import (
@ -52,7 +56,7 @@ async def async_setup_entry(
translation_placeholders={CONF_ACCOUNT_NAME: entry.data[CONF_ACCOUNT_NAME]},
) from err
except ClientAuthenticationError as err:
raise ConfigEntryError(
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="invalid_auth",
translation_placeholders={CONF_ACCOUNT_NAME: entry.data[CONF_ACCOUNT_NAME]},
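
Switching from ConfigEntryError to ConfigEntryAuthFailed matters because the latter puts the entry into a reauth state and launches the integration's reauth flow (added in config_flow.py below) instead of failing setup outright. A minimal sketch of the pattern:

from azure.core.exceptions import ClientAuthenticationError
from azure.storage.blob.aio import ContainerClient

from homeassistant.exceptions import ConfigEntryAuthFailed


async def _check_auth(container_client: ContainerClient) -> None:
    """Translate an auth error into a reauth trigger (sketch)."""
    try:
        await container_client.exists()
    except ClientAuthenticationError as err:
        # ConfigEntryAuthFailed puts the entry in a reauth state and
        # starts async_step_reauth on the integration's config flow.
        raise ConfigEntryAuthFailed("Invalid storage account key") from err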

View File

@ -141,7 +141,7 @@ class AzureStorageBackupAgent(BackupAgent):
"""Delete a backup file."""
blob = await self._find_blob_by_backup_id(backup_id)
if blob is None:
return
raise BackupNotFound(f"Backup {backup_id} not found")
await self._client.delete_blob(blob.name)
@handle_backup_errors
@ -163,11 +163,11 @@ class AzureStorageBackupAgent(BackupAgent):
self,
backup_id: str,
**kwargs: Any,
) -> AgentBackup | None:
) -> AgentBackup:
"""Return a backup."""
blob = await self._find_blob_by_backup_id(backup_id)
if blob is None:
return None
raise BackupNotFound(f"Backup {backup_id} not found")
return AgentBackup.from_dict(json.loads(blob.metadata["backup_metadata"]))
@ -175,7 +175,8 @@ class AzureStorageBackupAgent(BackupAgent):
"""Find a blob by backup id."""
async for blob in self._client.list_blobs(include="metadata"):
if (
backup_id == blob.metadata.get("backup_id", "")
blob.metadata is not None
and backup_id == blob.metadata.get("backup_id", "")
and blob.metadata.get("metadata_version") == METADATA_VERSION
):
return blob
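
This tracks the revised BackupAgent contract (see the base-class hunk further below): async_get_backup and async_delete_backup now raise BackupNotFound instead of returning or swallowing None. A hedged sketch of a custom agent under the new contract; ExampleAgent and its in-memory index are illustrative:

from typing import Any

from homeassistant.components.backup import AgentBackup, BackupAgent, BackupNotFound


class ExampleAgent(BackupAgent):
    """Illustrative agent with an assumed in-memory index."""

    domain = "example"
    name = "Example"
    unique_id = "example"

    def __init__(self) -> None:
        self._backups: dict[str, AgentBackup] = {}

    async def async_get_backup(self, backup_id: str, **kwargs: Any) -> AgentBackup:
        """Return a backup, raising instead of returning None (new contract)."""
        if (backup := self._backups.get(backup_id)) is None:
            raise BackupNotFound(f"Backup {backup_id} not found")
        return backup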

View File

@ -1,5 +1,6 @@
"""Config flow for Azure Storage integration."""
from collections.abc import Mapping
import logging
from typing import Any
@ -26,6 +27,26 @@ _LOGGER = logging.getLogger(__name__)
class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for azure storage."""
def get_account_url(self, account_name: str) -> str:
"""Get the account URL."""
return f"https://{account_name}.blob.core.windows.net/"
async def validate_config(
self, container_client: ContainerClient
) -> dict[str, str]:
"""Validate the configuration."""
errors: dict[str, str] = {}
try:
await container_client.exists()
except ResourceNotFoundError:
errors["base"] = "cannot_connect"
except ClientAuthenticationError:
errors[CONF_STORAGE_ACCOUNT_KEY] = "invalid_auth"
except Exception:
_LOGGER.exception("Unknown exception occurred")
errors["base"] = "unknown"
return errors
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
@ -38,20 +59,13 @@ class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN):
{CONF_ACCOUNT_NAME: user_input[CONF_ACCOUNT_NAME]}
)
container_client = ContainerClient(
account_url=f"https://{user_input[CONF_ACCOUNT_NAME]}.blob.core.windows.net/",
account_url=self.get_account_url(user_input[CONF_ACCOUNT_NAME]),
container_name=user_input[CONF_CONTAINER_NAME],
credential=user_input[CONF_STORAGE_ACCOUNT_KEY],
transport=AioHttpTransport(session=async_get_clientsession(self.hass)),
)
try:
await container_client.exists()
except ResourceNotFoundError:
errors["base"] = "cannot_connect"
except ClientAuthenticationError:
errors[CONF_STORAGE_ACCOUNT_KEY] = "invalid_auth"
except Exception:
_LOGGER.exception("Unknown exception occurred")
errors["base"] = "unknown"
errors = await self.validate_config(container_client)
if not errors:
return self.async_create_entry(
title=f"{user_input[CONF_ACCOUNT_NAME]}/{user_input[CONF_CONTAINER_NAME]}",
@ -70,3 +84,77 @@ class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN):
),
errors=errors,
)
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Perform reauth upon an API authentication error."""
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Confirm reauth dialog."""
errors: dict[str, str] = {}
reauth_entry = self._get_reauth_entry()
if user_input is not None:
container_client = ContainerClient(
account_url=self.get_account_url(reauth_entry.data[CONF_ACCOUNT_NAME]),
container_name=reauth_entry.data[CONF_CONTAINER_NAME],
credential=user_input[CONF_STORAGE_ACCOUNT_KEY],
transport=AioHttpTransport(session=async_get_clientsession(self.hass)),
)
errors = await self.validate_config(container_client)
if not errors:
return self.async_update_reload_and_abort(
reauth_entry,
data={**reauth_entry.data, **user_input},
)
return self.async_show_form(
step_id="reauth_confirm",
data_schema=vol.Schema(
{
vol.Required(CONF_STORAGE_ACCOUNT_KEY): str,
}
),
errors=errors,
)
async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Reconfigure the entry."""
errors: dict[str, str] = {}
reconfigure_entry = self._get_reconfigure_entry()
if user_input is not None:
container_client = ContainerClient(
account_url=self.get_account_url(
reconfigure_entry.data[CONF_ACCOUNT_NAME]
),
container_name=user_input[CONF_CONTAINER_NAME],
credential=user_input[CONF_STORAGE_ACCOUNT_KEY],
transport=AioHttpTransport(session=async_get_clientsession(self.hass)),
)
errors = await self.validate_config(container_client)
if not errors:
return self.async_update_reload_and_abort(
reconfigure_entry,
data={**reconfigure_entry.data, **user_input},
)
return self.async_show_form(
data_schema=vol.Schema(
{
vol.Required(
CONF_CONTAINER_NAME,
default=reconfigure_entry.data[CONF_CONTAINER_NAME],
): str,
vol.Required(
CONF_STORAGE_ACCOUNT_KEY,
default=reconfigure_entry.data[CONF_STORAGE_ACCOUNT_KEY],
): str,
}
),
errors=errors,
)

View File

@ -7,6 +7,6 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["azure-storage-blob"],
"quality_scale": "bronze",
"quality_scale": "platinum",
"requirements": ["azure-storage-blob==12.24.0"]
}

View File

@ -57,7 +57,7 @@ rules:
status: exempt
comment: |
This integration does not have platforms.
reauthentication-flow: todo
reauthentication-flow: done
test-coverage: done
# Gold
@ -121,7 +121,7 @@ rules:
status: exempt
comment: |
This integration does not have entities.
reconfiguration-flow: todo
reconfiguration-flow: done
repair-issues: done
stale-devices:
status: exempt

View File

@ -19,10 +19,34 @@
},
"description": "Set up an Azure (Blob) storage account to be used for backups.",
"title": "Add Azure storage account"
},
"reauth_confirm": {
"data": {
"storage_account_key": "[%key:component::azure_storage::config::step::user::data::storage_account_key%]"
},
"data_description": {
"storage_account_key": "[%key:component::azure_storage::config::step::user::data_description::storage_account_key%]"
},
"description": "Provide a new storage account key.",
"title": "Reauthenticate Azure storage account"
},
"reconfigure": {
"data": {
"container_name": "[%key:component::azure_storage::config::step::user::data::container_name%]",
"storage_account_key": "[%key:component::azure_storage::config::step::user::data::storage_account_key%]"
},
"data_description": {
"container_name": "[%key:component::azure_storage::config::step::user::data_description::container_name%]",
"storage_account_key": "[%key:component::azure_storage::config::step::user::data_description::storage_account_key%]"
},
"description": "Change the settings of the Azure storage integration.",
"title": "Reconfigure Azure storage account"
}
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
}
},
"issues": {

View File

@ -1,7 +1,9 @@
"""The Backup integration."""
from homeassistant.config_entries import SOURCE_SYSTEM
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers import config_validation as cv, discovery_flow
from homeassistant.helpers.backup import DATA_BACKUP
from homeassistant.helpers.hassio import is_hassio
from homeassistant.helpers.typing import ConfigType
@ -18,10 +20,12 @@ from .agent import (
)
from .config import BackupConfig, CreateBackupParametersDict
from .const import DATA_MANAGER, DOMAIN
from .coordinator import BackupConfigEntry, BackupDataUpdateCoordinator
from .http import async_register_http_views
from .manager import (
BackupManager,
BackupManagerError,
BackupPlatformEvent,
BackupPlatformProtocol,
BackupReaderWriter,
BackupReaderWriterError,
@ -52,6 +56,7 @@ __all__ = [
"BackupConfig",
"BackupManagerError",
"BackupNotFound",
"BackupPlatformEvent",
"BackupPlatformProtocol",
"BackupReaderWriter",
"BackupReaderWriterError",
@ -74,6 +79,8 @@ __all__ = [
"suggested_filename_from_name_date",
]
PLATFORMS = [Platform.SENSOR]
CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
@ -128,4 +135,28 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async_register_http_views(hass)
discovery_flow.async_create_flow(
hass, DOMAIN, context={"source": SOURCE_SYSTEM}, data={}
)
return True
async def async_setup_entry(hass: HomeAssistant, entry: BackupConfigEntry) -> bool:
"""Set up a config entry."""
backup_manager: BackupManager = hass.data[DATA_MANAGER]
coordinator = BackupDataUpdateCoordinator(hass, entry, backup_manager)
await coordinator.async_config_entry_first_refresh()
entry.async_on_unload(coordinator.async_unsubscribe)
entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: BackupConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

View File

@ -41,6 +41,8 @@ class BackupAgent(abc.ABC):
) -> AsyncIterator[bytes]:
"""Download a backup file.
Raises BackupNotFound if the backup does not exist.
:param backup_id: The ID of the backup that was returned in async_list_backups.
:return: An async iterator that yields bytes.
"""
@ -67,6 +69,8 @@ class BackupAgent(abc.ABC):
) -> None:
"""Delete a backup file.
Raises BackupNotFound if the backup does not exist.
:param backup_id: The ID of the backup that was returned in async_list_backups.
"""
@ -79,8 +83,11 @@ class BackupAgent(abc.ABC):
self,
backup_id: str,
**kwargs: Any,
) -> AgentBackup | None:
"""Return a backup."""
) -> AgentBackup:
"""Return a backup.
Raises BackupNotFound if the backup does not exist.
"""
class LocalBackupAgent(BackupAgent):

View File

@ -88,13 +88,13 @@ class CoreLocalBackupAgent(LocalBackupAgent):
self,
backup_id: str,
**kwargs: Any,
) -> AgentBackup | None:
) -> AgentBackup:
"""Return a backup."""
if not self._loaded_backups:
await self._load_backups()
if backup_id not in self._backups:
return None
raise BackupNotFound(f"Backup {backup_id} not found")
backup, backup_path = self._backups[backup_id]
if not await self._hass.async_add_executor_job(backup_path.exists):
@ -107,7 +107,7 @@ class CoreLocalBackupAgent(LocalBackupAgent):
backup_path,
)
self._backups.pop(backup_id)
return None
raise BackupNotFound(f"Backup {backup_id} not found")
return backup
@ -130,10 +130,7 @@ class CoreLocalBackupAgent(LocalBackupAgent):
if not self._loaded_backups:
await self._load_backups()
try:
backup_path = self.get_backup_path(backup_id)
except BackupNotFound:
return
backup_path = self.get_backup_path(backup_id)
await self._hass.async_add_executor_job(backup_path.unlink, True)
LOGGER.debug("Deleted backup located at %s", backup_path)
self._backups.pop(backup_id)

View File

@ -0,0 +1,21 @@
"""Config flow for Home Assistant Backup integration."""
from __future__ import annotations
from typing import Any
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from .const import DOMAIN
class BackupConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Home Assistant Backup."""
VERSION = 1
async def async_step_system(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
return self.async_create_entry(title="Backup", data={})
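
Since only async_step_system is implemented, this entry can never be created from the UI. It is created once by the system-source discovery flow started in async_setup (shown in the __init__.py hunk above), and single_config_entry in the manifest keeps it unique. For reference, a minimal sketch of the trigger, with hass and DOMAIN as in that hunk:

from homeassistant.config_entries import SOURCE_SYSTEM
from homeassistant.helpers import discovery_flow

# config_entries dispatches the "system" source to async_step_system.
discovery_flow.async_create_flow(
    hass, DOMAIN, context={"source": SOURCE_SYSTEM}, data={}
)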

View File

@ -16,8 +16,8 @@ DATA_MANAGER: HassKey[BackupManager] = HassKey(DOMAIN)
LOGGER = getLogger(__package__)
EXCLUDE_FROM_BACKUP = [
"__pycache__/*",
".DS_Store",
"**/__pycache__/*",
"**/.DS_Store",
".HA_RESTORE",
"*.db-shm",
"*.log.*",

View File

@ -0,0 +1,81 @@
"""Coordinator for Home Assistant Backup integration."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.backup import (
async_subscribe_events,
async_subscribe_platform_events,
)
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from .const import DOMAIN, LOGGER
from .manager import (
BackupManager,
BackupManagerState,
BackupPlatformEvent,
ManagerStateEvent,
)
type BackupConfigEntry = ConfigEntry[BackupDataUpdateCoordinator]
@dataclass
class BackupCoordinatorData:
"""Class to hold backup data."""
backup_manager_state: BackupManagerState
last_successful_automatic_backup: datetime | None
next_scheduled_automatic_backup: datetime | None
class BackupDataUpdateCoordinator(DataUpdateCoordinator[BackupCoordinatorData]):
"""Class to retrieve backup status."""
config_entry: ConfigEntry
def __init__(
self,
hass: HomeAssistant,
config_entry: ConfigEntry,
backup_manager: BackupManager,
) -> None:
"""Initialize coordinator."""
super().__init__(
hass,
LOGGER,
config_entry=config_entry,
name=DOMAIN,
update_interval=None,
)
self.unsubscribe: list[Callable[[], None]] = [
async_subscribe_events(hass, self._on_event),
async_subscribe_platform_events(hass, self._on_event),
]
self.backup_manager = backup_manager
@callback
def _on_event(self, event: ManagerStateEvent | BackupPlatformEvent) -> None:
"""Handle new event."""
LOGGER.debug("Received backup event: %s", event)
self.config_entry.async_create_task(self.hass, self.async_refresh())
async def _async_update_data(self) -> BackupCoordinatorData:
"""Update backup manager data."""
return BackupCoordinatorData(
self.backup_manager.state,
self.backup_manager.config.data.last_completed_automatic_backup,
self.backup_manager.config.data.schedule.next_automatic_backup,
)
@callback
def async_unsubscribe(self) -> None:
"""Unsubscribe from events."""
for unsub in self.unsubscribe:
unsub()
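
With update_interval=None the coordinator never polls; refreshes are purely push-driven by the two subscriptions above (manager event -> _on_event -> async_refresh -> _async_update_data -> entity listeners). A small illustrative helper reading the resulting snapshot, using the classes defined in this file (not part of this commit):

def describe(coordinator: BackupDataUpdateCoordinator) -> str:
    """Render the current snapshot (illustrative helper)."""
    data = coordinator.data  # BackupCoordinatorData
    return (
        f"state={data.backup_manager_state} "
        f"next={data.next_scheduled_automatic_backup} "
        f"last={data.last_successful_automatic_backup}"
    )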

View File

@ -0,0 +1,27 @@
"""Diagnostics support for Home Assistant Backup integration."""
from __future__ import annotations
from typing import Any
from homeassistant.components.diagnostics import async_redact_data
from homeassistant.const import CONF_PASSWORD
from homeassistant.core import HomeAssistant
from .coordinator import BackupConfigEntry
async def async_get_config_entry_diagnostics(
hass: HomeAssistant, entry: BackupConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
coordinator = entry.runtime_data
return {
"backup_agents": [
{"name": agent.name, "agent_id": agent.agent_id}
for agent in coordinator.backup_manager.backup_agents.values()
],
"backup_config": async_redact_data(
coordinator.backup_manager.config.data.to_dict(), [CONF_PASSWORD]
),
}
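
async_redact_data masks the listed keys recursively, so the stored backup password never appears in a diagnostics download. For example:

from homeassistant.components.diagnostics import async_redact_data

redacted = async_redact_data(
    {"password": "hunter2", "retention": {"days": 7}}, ["password"]
)
# -> {"password": "**REDACTED**", "retention": {"days": 7}}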

View File

@ -0,0 +1,36 @@
"""Base for backup entities."""
from __future__ import annotations
from homeassistant.const import __version__ as HA_VERSION
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN
from .coordinator import BackupDataUpdateCoordinator
class BackupManagerEntity(CoordinatorEntity[BackupDataUpdateCoordinator]):
"""Base entity for backup manager."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: BackupDataUpdateCoordinator,
entity_description: EntityDescription,
) -> None:
"""Initialize base entity."""
super().__init__(coordinator)
self.entity_description = entity_description
self._attr_unique_id = entity_description.key
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, "backup_manager")},
manufacturer="Home Assistant",
model="Home Assistant Backup",
sw_version=HA_VERSION,
name="Backup",
entry_type=DeviceEntryType.SERVICE,
configuration_url="homeassistant://config/backup",
)

View File

@ -15,6 +15,7 @@ from multidict import istr
from homeassistant.components.http import KEY_HASS, HomeAssistantView, require_admin
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import frame
from homeassistant.util import slugify
from . import util
@ -59,11 +60,19 @@ class DownloadBackupView(HomeAssistantView):
if agent_id not in manager.backup_agents:
return Response(status=HTTPStatus.BAD_REQUEST)
agent = manager.backup_agents[agent_id]
backup = await agent.async_get_backup(backup_id)
try:
backup = await agent.async_get_backup(backup_id)
except BackupNotFound:
return Response(status=HTTPStatus.NOT_FOUND)
# We don't need to check if the path exists, aiohttp.FileResponse will handle
# that
if backup is None:
# Check for None to be backwards compatible with the old BackupAgent API,
# this can be removed in HA Core 2025.10
if not backup:
frame.report_usage(
"returns None from BackupAgent.async_get_backup",
breaks_in_ha_version="2025.10",
integration_domain=agent_id.partition(".")[0],
)
return Response(status=HTTPStatus.NOT_FOUND)
headers = {
@ -92,6 +101,8 @@ class DownloadBackupView(HomeAssistantView):
) -> StreamResponse | FileResponse | Response:
if agent_id in manager.local_backup_agents:
local_agent = manager.local_backup_agents[agent_id]
# We don't need to check if the path exists, aiohttp.FileResponse will
# handle that
path = local_agent.get_backup_path(backup_id)
return FileResponse(path=path.as_posix(), headers=headers)

View File

@ -30,6 +30,7 @@ from homeassistant.backup_restore import (
from homeassistant.const import __version__ as HAVERSION
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import (
frame,
instance_id,
integration_platform,
issue_registry as ir,
@ -64,6 +65,7 @@ from .models import (
AgentBackup,
BackupError,
BackupManagerError,
BackupNotFound,
BackupReaderWriterError,
BaseBackup,
Folder,
@ -227,6 +229,13 @@ class RestoreBackupEvent(ManagerStateEvent):
state: RestoreBackupState
@dataclass(frozen=True, kw_only=True, slots=True)
class BackupPlatformEvent:
"""Backup platform class."""
domain: str
@dataclass(frozen=True, kw_only=True, slots=True)
class BlockedEvent(ManagerStateEvent):
"""Backup manager blocked, Home Assistant is starting."""
@ -349,10 +358,13 @@ class BackupManager:
# Latest backup event and backup event subscribers
self.last_event: ManagerStateEvent = BlockedEvent()
self.last_non_idle_event: ManagerStateEvent | None = None
self.last_action_event: ManagerStateEvent | None = None
self._backup_event_subscriptions = hass.data[
DATA_BACKUP
].backup_event_subscriptions
self._backup_platform_event_subscriptions = hass.data[
DATA_BACKUP
].backup_platform_event_subscriptions
async def async_setup(self) -> None:
"""Set up the backup manager."""
@ -463,6 +475,9 @@ class BackupManager:
LOGGER.debug("%s platforms loaded in total", len(self.platforms))
LOGGER.debug("%s agents loaded in total", len(self.backup_agents))
LOGGER.debug("%s local agents loaded in total", len(self.local_backup_agents))
event = BackupPlatformEvent(domain=integration_domain)
for subscription in self._backup_platform_event_subscriptions:
subscription(event)
async def async_pre_backup_actions(self) -> None:
"""Perform pre backup actions."""
@ -665,6 +680,8 @@ class BackupManager:
)
for idx, result in enumerate(get_backup_results):
agent_id = agent_ids[idx]
if isinstance(result, BackupNotFound):
continue
if isinstance(result, BackupAgentError):
agent_errors[agent_id] = result
continue
@ -676,7 +693,14 @@ class BackupManager:
continue
if isinstance(result, BaseException):
raise result # unexpected error
# Check for None to be backwards compatible with the old BackupAgent API,
# this can be removed in HA Core 2025.10
if not result:
frame.report_usage(
"returns None from BackupAgent.async_get_backup",
breaks_in_ha_version="2025.10",
integration_domain=agent_id.partition(".")[0],
)
continue
if backup is None:
if known_backup := self.known_backups.get(backup_id):
@ -740,6 +764,8 @@ class BackupManager:
)
for idx, result in enumerate(delete_backup_results):
agent_id = agent_ids[idx]
if isinstance(result, BackupNotFound):
continue
if isinstance(result, BackupAgentError):
agent_errors[agent_id] = result
continue
@ -849,7 +875,7 @@ class BackupManager:
agent_errors = {
backup_id: error
for backup_id, error in zip(backup_ids, delete_results, strict=True)
if error
if error and not isinstance(error, BackupNotFound)
}
if agent_errors:
LOGGER.error(
@ -1281,7 +1307,20 @@ class BackupManager:
) -> None:
"""Initiate restoring a backup."""
agent = self.backup_agents[agent_id]
if not await agent.async_get_backup(backup_id):
try:
backup = await agent.async_get_backup(backup_id)
except BackupNotFound as err:
raise BackupManagerError(
f"Backup {backup_id} not found in agent {agent_id}"
) from err
# Check for None to be backwards compatible with the old BackupAgent API,
# this can be removed in HA Core 2025.10
if not backup:
frame.report_usage(
"returns None from BackupAgent.async_get_backup",
breaks_in_ha_version="2025.10",
integration_domain=agent_id.partition(".")[0],
)
raise BackupManagerError(
f"Backup {backup_id} not found in agent {agent_id}"
)
@ -1311,7 +1350,7 @@ class BackupManager:
LOGGER.debug("Backup state: %s -> %s", current_state, new_state)
self.last_event = event
if not isinstance(event, (BlockedEvent, IdleEvent)):
self.last_non_idle_event = event
self.last_action_event = event
for subscription in self._backup_event_subscriptions:
subscription(event)
@ -1369,7 +1408,20 @@ class BackupManager:
agent = self.backup_agents[agent_id]
except KeyError as err:
raise BackupManagerError(f"Invalid agent selected: {agent_id}") from err
if not await agent.async_get_backup(backup_id):
try:
backup = await agent.async_get_backup(backup_id)
except BackupNotFound as err:
raise BackupManagerError(
f"Backup {backup_id} not found in agent {agent_id}"
) from err
# Check for None to be backwards compatible with the old BackupAgent API,
# this can be removed in HA Core 2025.10
if not backup:
frame.report_usage(
"returns None from BackupAgent.async_get_backup",
breaks_in_ha_version="2025.10",
integration_domain=agent_id.partition(".")[0],
)
raise BackupManagerError(
f"Backup {backup_id} not found in agent {agent_id}"
)
@ -1674,7 +1726,9 @@ class CoreBackupReaderWriter(BackupReaderWriter):
"""Filter to filter excludes."""
for exclude in excludes:
if not path.match(exclude):
# The Home Assistant Core configuration directory is added as "data"
# in the tar file, so we need to prefix that path to the filters.
if not path.full_match(f"data/{exclude}"):
continue
LOGGER.debug("Ignoring %s because of %s", path, exclude)
return True

View File

@ -5,8 +5,9 @@
"codeowners": ["@home-assistant/core"],
"dependencies": ["http", "websocket_api"],
"documentation": "https://www.home-assistant.io/integrations/backup",
"integration_type": "system",
"integration_type": "service",
"iot_class": "calculated",
"quality_scale": "internal",
"requirements": ["cronsim==2.6", "securetar==2025.2.1"]
"requirements": ["cronsim==2.6", "securetar==2025.2.1"],
"single_config_entry": true
}

View File

@ -0,0 +1,75 @@
"""Sensor platform for Home Assistant Backup integration."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import BackupConfigEntry, BackupCoordinatorData
from .entity import BackupManagerEntity
from .manager import BackupManagerState
@dataclass(kw_only=True, frozen=True)
class BackupSensorEntityDescription(SensorEntityDescription):
"""Description for Home Assistant Backup sensor entities."""
value_fn: Callable[[BackupCoordinatorData], str | datetime | None]
BACKUP_MANAGER_DESCRIPTIONS = (
BackupSensorEntityDescription(
key="backup_manager_state",
translation_key="backup_manager_state",
device_class=SensorDeviceClass.ENUM,
options=[state.value for state in BackupManagerState],
value_fn=lambda data: data.backup_manager_state,
),
BackupSensorEntityDescription(
key="next_scheduled_automatic_backup",
translation_key="next_scheduled_automatic_backup",
device_class=SensorDeviceClass.TIMESTAMP,
value_fn=lambda data: data.next_scheduled_automatic_backup,
),
BackupSensorEntityDescription(
key="last_successful_automatic_backup",
translation_key="last_successful_automatic_backup",
device_class=SensorDeviceClass.TIMESTAMP,
value_fn=lambda data: data.last_successful_automatic_backup,
),
)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: BackupConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Sensor set up for backup config entry."""
coordinator = config_entry.runtime_data
async_add_entities(
BackupManagerSensor(coordinator, description)
for description in BACKUP_MANAGER_DESCRIPTIONS
)
class BackupManagerSensor(BackupManagerEntity, SensorEntity):
"""Sensor to track backup manager state."""
entity_description: BackupSensorEntityDescription
@property
def native_value(self) -> str | datetime | None:
"""Return native value of entity."""
return self.entity_description.value_fn(self.coordinator.data)
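
The value_fn indirection lets the single BackupManagerSensor class serve all three descriptions. A quick illustration evaluating them against a synthetic snapshot, using the names defined here and in coordinator.py (values are made up):

from datetime import datetime, timezone

snapshot = BackupCoordinatorData(
    backup_manager_state=BackupManagerState.IDLE,
    last_successful_automatic_backup=None,
    next_scheduled_automatic_backup=datetime(2025, 4, 3, 3, 0, tzinfo=timezone.utc),
)
for description in BACKUP_MANAGER_DESCRIPTIONS:
    print(description.key, "->", description.value_fn(snapshot))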

View File

@ -22,5 +22,24 @@
"name": "Create automatic backup",
"description": "Creates a new backup with automatic backup settings."
}
},
"entity": {
"sensor": {
"backup_manager_state": {
"name": "Backup Manager State",
"state": {
"idle": "Idle",
"create_backup": "Creating a backup",
"receive_backup": "Receiving a backup",
"restore_backup": "Restoring a backup"
}
},
"next_scheduled_automatic_backup": {
"name": "Next scheduled automatic backup"
},
"last_successful_automatic_backup": {
"name": "Last successful automatic backup"
}
}
}
}

View File

@ -55,7 +55,7 @@ async def handle_info(
"backups": list(backups.values()),
"last_attempted_automatic_backup": manager.config.data.last_attempted_automatic_backup,
"last_completed_automatic_backup": manager.config.data.last_completed_automatic_backup,
"last_non_idle_event": manager.last_non_idle_event,
"last_action_event": manager.last_action_event,
"next_automatic_backup": manager.config.data.schedule.next_automatic_backup,
"next_automatic_backup_additional": manager.config.data.schedule.next_automatic_backup_additional,
"state": manager.state,

View File

@ -23,7 +23,7 @@
"entity": {
"climate": {
"auto_comfort": {
"name": "Auto comfort"
"name": "Auto Comfort"
}
},
"fan": {
@ -39,25 +39,25 @@
},
"number": {
"comfort_min_speed": {
"name": "Auto Comfort Minimum Speed"
"name": "Auto Comfort minimum speed"
},
"comfort_max_speed": {
"name": "Auto Comfort Maximum Speed"
"name": "Auto Comfort maximum speed"
},
"comfort_heat_assist_speed": {
"name": "Auto Comfort Heat Assist Speed"
"name": "Auto Comfort Heat Assist speed"
},
"return_to_auto_timeout": {
"name": "Return to Auto Timeout"
"name": "Return to Auto timeout"
},
"motion_sense_timeout": {
"name": "Motion Sense Timeout"
"name": "Motion sense timeout"
},
"light_return_to_auto_timeout": {
"name": "Light Return to Auto Timeout"
"name": "Light return to Auto timeout"
},
"light_auto_motion_timeout": {
"name": "Light Motion Sense Timeout"
"name": "Light motion sense timeout"
}
},
"sensor": {
@ -76,10 +76,10 @@
},
"switch": {
"legacy_ir_remote_enable": {
"name": "Legacy IR Remote"
"name": "Legacy IR remote"
},
"led_indicators_enable": {
"name": "Led Indicators"
"name": "LED indicators"
},
"comfort_heat_assist_enable": {
"name": "Auto Comfort Heat Assist"
@ -88,10 +88,10 @@
"name": "Beep"
},
"eco_enable": {
"name": "Eco Mode"
"name": "Eco mode"
},
"motion_sense_enable": {
"name": "Motion Sense"
"name": "Motion sense"
},
"return_to_auto_enable": {
"name": "Return to Auto"
@ -103,7 +103,7 @@
"name": "Dim to Warm"
},
"light_return_to_auto_enable": {
"name": "Light Return to Auto"
"name": "Light return to Auto"
}
}
}

View File

@ -21,6 +21,7 @@ _LOGGER = logging.getLogger(__name__)
PLATFORMS = [
Platform.BINARY_SENSOR,
Platform.CLIMATE,
Platform.EVENT,
Platform.FAN,
Platform.LIGHT,
Platform.SELECT,
@ -28,7 +29,6 @@ PLATFORMS = [
Platform.TIME,
]
KEEP_ALIVE_INTERVAL = timedelta(minutes=1)
SYNC_TIME_INTERVAL = timedelta(hours=1)

View File

@ -0,0 +1,91 @@
"""Support for Balboa events."""
from __future__ import annotations
from datetime import datetime, timedelta
from pybalboa import EVENT_UPDATE, SpaClient
from homeassistant.components.event import EventEntity
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.event import async_track_time_interval
from . import BalboaConfigEntry
from .entity import BalboaEntity
FAULT = "fault"
FAULT_DATE = "fault_date"
REQUEST_FAULT_LOG_INTERVAL = timedelta(minutes=5)
FAULT_MESSAGE_CODE_MAP: dict[int, str] = {
15: "sensor_out_of_sync",
16: "low_flow",
17: "flow_failed",
18: "settings_reset",
19: "priming_mode",
20: "clock_failed",
21: "settings_reset",
22: "memory_failure",
26: "service_sensor_sync",
27: "heater_dry",
28: "heater_may_be_dry",
29: "water_too_hot",
30: "heater_too_hot",
31: "sensor_a_fault",
32: "sensor_b_fault",
34: "pump_stuck",
35: "hot_fault",
36: "gfci_test_failed",
37: "standby_mode",
}
FAULT_EVENT_TYPES = sorted(set(FAULT_MESSAGE_CODE_MAP.values()))
async def async_setup_entry(
hass: HomeAssistant,
entry: BalboaConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the spa's events."""
async_add_entities([BalboaEventEntity(entry.runtime_data)])
class BalboaEventEntity(BalboaEntity, EventEntity):
"""Representation of a Balboa event entity."""
_attr_event_types = FAULT_EVENT_TYPES
_attr_translation_key = FAULT
def __init__(self, spa: SpaClient) -> None:
"""Initialize a Balboa event entity."""
super().__init__(spa, FAULT)
@callback
def _async_handle_event(self) -> None:
"""Handle the fault event."""
if not (fault := self._client.fault):
return
fault_date = fault.fault_datetime.isoformat()
if self.state_attributes.get(FAULT_DATE) != fault_date:
self._trigger_event(
FAULT_MESSAGE_CODE_MAP.get(fault.message_code, fault.message),
{FAULT_DATE: fault_date, "code": fault.message_code},
)
self.async_write_ha_state()
async def async_added_to_hass(self) -> None:
"""Run when entity about to be added to hass."""
await super().async_added_to_hass()
self.async_on_remove(self._client.on(EVENT_UPDATE, self._async_handle_event))
async def request_fault_log(now: datetime | None = None) -> None:
"""Request the most recent fault log."""
await self._client.request_fault_log()
await request_fault_log()
self.async_on_remove(
async_track_time_interval(
self.hass, request_fault_log, REQUEST_FAULT_LOG_INTERVAL
)
)
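
For consumers, the event entity's state is the timestamp of the most recent fault, with the fired type in its event_type attribute. A hedged sketch of reacting to new faults from Python; the entity ID event.my_spa_fault is hypothetical:

from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.helpers.event import async_track_state_change_event


def watch_spa_faults(hass: HomeAssistant):
    """Subscribe to fault events; returns the unsubscribe callable."""

    @callback
    def _on_fault(event: Event) -> None:
        if (new_state := event.data["new_state"]) is None:
            return
        # The fired type and fault date live in the state attributes.
        fault = new_state.attributes.get("event_type")  # e.g. "low_flow"
        date = new_state.attributes.get("fault_date")
        print(f"Spa fault {fault} at {date}")

    return async_track_state_change_event(hass, ["event.my_spa_fault"], _on_fault)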

View File

@ -57,6 +57,35 @@
}
}
},
"event": {
"fault": {
"name": "Fault",
"state_attributes": {
"event_type": {
"state": {
"sensor_out_of_sync": "Sensors are out of sync",
"low_flow": "The water flow is low",
"flow_failed": "The water flow has failed",
"settings_reset": "The settings have been reset",
"priming_mode": "Priming mode",
"clock_failed": "The clock has failed",
"memory_failure": "Program memory failure",
"service_sensor_sync": "Sensors are out of sync -- call for service",
"heater_dry": "The heater is dry",
"heater_may_be_dry": "The heater may be dry",
"water_too_hot": "The water is too hot",
"heater_too_hot": "The heater is too hot",
"sensor_a_fault": "Sensor A fault",
"sensor_b_fault": "Sensor B fault",
"pump_stuck": "A pump may be stuck on",
"hot_fault": "Hot fault",
"gfci_test_failed": "The GFCI test failed",
"standby_mode": "Standby mode (hold mode)"
}
}
}
}
},
"fan": {
"pump": {
"name": "Pump {index}"

View File

@ -29,7 +29,7 @@
"description": "Manually configure your Bang & Olufsen device."
},
"zeroconf_confirm": {
"title": "Setup Bang & Olufsen device",
"title": "Set up Bang & Olufsen device",
"description": "Confirm the configuration of the {model}-{serial_number} @ {host}."
}
}
@ -197,11 +197,11 @@
"services": {
"beolink_allstandby": {
"name": "Beolink all standby",
"description": "Set all Connected Beolink devices to standby."
"description": "Sets all connected Beolink devices to standby."
},
"beolink_expand": {
"name": "Beolink expand",
"description": "Expand current Beolink experience.",
"description": "Adds devices to the current Beolink experience.",
"fields": {
"all_discovered": {
"name": "All discovered",
@ -221,7 +221,7 @@
},
"beolink_join": {
"name": "Beolink join",
"description": "Join a Beolink experience.",
"description": "Joins a Beolink experience.",
"fields": {
"beolink_jid": {
"name": "Beolink JID",
@ -241,11 +241,11 @@
},
"beolink_leave": {
"name": "Beolink leave",
"description": "Leave a Beolink experience."
"description": "Leaves a Beolink experience."
},
"beolink_unexpand": {
"name": "Beolink unexpand",
"description": "Unexpand from current Beolink experience.",
"description": "Removes devices from the current Beolink experience.",
"fields": {
"beolink_jids": {
"name": "Beolink JIDs",
@ -274,7 +274,7 @@
"message": "An error occurred while attempting to play {media_type}: {error_message}."
},
"invalid_grouping_entity": {
"message": "Entity with id: {entity_id} can't be added to the Beolink session. Is the entity a Bang & Olufsen media_player?"
"message": "Entity with ID {entity_id} can't be added to the Beolink session. Is the entity a Bang & Olufsen media_player?"
},
"invalid_sound_mode": {
"message": "{invalid_sound_mode} is an invalid sound mode. Valid values are: {valid_sound_modes}."

View File

@ -501,18 +501,16 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
return
# presets and inputs might have the same name; inputs are checked first
url: str | None = None
for input_ in self._inputs:
if input_.text == source:
url = input_.url
await self._player.play_url(input_.url)
return
for preset in self._presets:
if preset.name == source:
url = preset.url
await self._player.load_preset(preset.id)
return
if url is None:
raise ServiceValidationError(f"Source {source} not found")
await self._player.play_url(url)
raise ServiceValidationError(f"Source {source} not found")
async def async_clear_playlist(self) -> None:
"""Clear players playlist."""

View File

@ -311,11 +311,24 @@ async def async_update_device(
update the device with the new location so they can
figure out where the adapter is.
"""
address = details[ADAPTER_ADDRESS]
connections = {(dr.CONNECTION_BLUETOOTH, address)}
device_registry = dr.async_get(hass)
# We only have one device for the config entry
# so if the address has been corrected, make
# sure the device entry reflects the correct
# address
for device in dr.async_entries_for_config_entry(device_registry, entry.entry_id):
for conn_type, conn_value in device.connections:
if conn_type == dr.CONNECTION_BLUETOOTH and conn_value != address:
device_registry.async_update_device(
device.id, new_connections=connections
)
break
device_entry = device_registry.async_get_or_create(
config_entry_id=entry.entry_id,
name=adapter_human_name(adapter, details[ADAPTER_ADDRESS]),
connections={(dr.CONNECTION_BLUETOOTH, details[ADAPTER_ADDRESS])},
name=adapter_human_name(adapter, address),
connections=connections,
manufacturer=details[ADAPTER_MANUFACTURER],
model=adapter_model(details),
sw_version=details.get(ADAPTER_SW_VERSION),
@ -342,9 +355,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
)
)
)
return True
address = entry.unique_id
assert address is not None
assert source_entry is not None
source_domain = entry.data[CONF_SOURCE_DOMAIN]
if mac_manufacturer := await get_manufacturer_from_mac(address):
manufacturer = f"{mac_manufacturer} ({source_domain})"

View File

@ -186,16 +186,28 @@ class BluetoothConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a flow initialized by an external scanner."""
source = user_input[CONF_SOURCE]
await self.async_set_unique_id(source)
source_config_entry_id = user_input[CONF_SOURCE_CONFIG_ENTRY_ID]
data = {
CONF_SOURCE: source,
CONF_SOURCE_MODEL: user_input[CONF_SOURCE_MODEL],
CONF_SOURCE_DOMAIN: user_input[CONF_SOURCE_DOMAIN],
CONF_SOURCE_CONFIG_ENTRY_ID: user_input[CONF_SOURCE_CONFIG_ENTRY_ID],
CONF_SOURCE_CONFIG_ENTRY_ID: source_config_entry_id,
CONF_SOURCE_DEVICE_ID: user_input[CONF_SOURCE_DEVICE_ID],
}
self._abort_if_unique_id_configured(updates=data)
manager = get_manager()
scanner = manager.async_scanner_by_source(source)
for entry in self._async_current_entries(include_ignore=False):
# If the mac address needs to be corrected, migrate
# the config entry to the new mac address
if (
entry.data.get(CONF_SOURCE_CONFIG_ENTRY_ID) == source_config_entry_id
and entry.unique_id != source
):
self.hass.config_entries.async_update_entry(
entry, unique_id=source, data={**entry.data, **data}
)
self.hass.config_entries.async_schedule_reload(entry.entry_id)
return self.async_abort(reason="already_configured")
scanner = get_manager().async_scanner_by_source(source)
assert scanner is not None
return self.async_create_entry(title=scanner.name, data=data)

View File

@ -18,9 +18,9 @@
"bleak==0.22.3",
"bleak-retry-connector==3.9.0",
"bluetooth-adapters==0.21.4",
"bluetooth-auto-recovery==1.4.4",
"bluetooth-data-tools==1.23.4",
"dbus-fast==2.33.0",
"habluetooth==3.24.1"
"bluetooth-auto-recovery==1.4.5",
"bluetooth-data-tools==1.26.5",
"dbus-fast==2.43.0",
"habluetooth==3.37.0"
]
}

View File

@ -0,0 +1,62 @@
"""The Bosch Alarm integration."""
from __future__ import annotations
from ssl import SSLError
from bosch_alarm_mode2 import Panel
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr
from .const import CONF_INSTALLER_CODE, CONF_USER_CODE, DOMAIN
PLATFORMS: list[Platform] = [Platform.ALARM_CONTROL_PANEL]
type BoschAlarmConfigEntry = ConfigEntry[Panel]
async def async_setup_entry(hass: HomeAssistant, entry: BoschAlarmConfigEntry) -> bool:
"""Set up Bosch Alarm from a config entry."""
panel = Panel(
host=entry.data[CONF_HOST],
port=entry.data[CONF_PORT],
automation_code=entry.data.get(CONF_PASSWORD),
installer_or_user_code=entry.data.get(
CONF_INSTALLER_CODE, entry.data.get(CONF_USER_CODE)
),
)
try:
await panel.connect()
except (PermissionError, ValueError) as err:
await panel.disconnect()
raise ConfigEntryNotReady from err
except (TimeoutError, OSError, ConnectionRefusedError, SSLError) as err:
await panel.disconnect()
raise ConfigEntryNotReady("Connection failed") from err
entry.runtime_data = panel
device_registry = dr.async_get(hass)
device_registry.async_get_or_create(
config_entry_id=entry.entry_id,
identifiers={(DOMAIN, entry.unique_id or entry.entry_id)},
name=f"Bosch {panel.model}",
manufacturer="Bosch Security Systems",
model=panel.model,
sw_version=panel.firmware_version,
)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: BoschAlarmConfigEntry) -> bool:
"""Unload a config entry."""
if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
await entry.runtime_data.disconnect()
return unload_ok

View File

@ -0,0 +1,109 @@
"""Support for Bosch Alarm Panel."""
from __future__ import annotations
from bosch_alarm_mode2 import Panel
from homeassistant.components.alarm_control_panel import (
AlarmControlPanelEntity,
AlarmControlPanelEntityFeature,
AlarmControlPanelState,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import BoschAlarmConfigEntry
from .const import DOMAIN
async def async_setup_entry(
hass: HomeAssistant,
config_entry: BoschAlarmConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up control panels for each area."""
panel = config_entry.runtime_data
async_add_entities(
AreaAlarmControlPanel(
panel,
area_id,
config_entry.unique_id or config_entry.entry_id,
)
for area_id in panel.areas
)
class AreaAlarmControlPanel(AlarmControlPanelEntity):
"""An alarm control panel entity for a bosch alarm panel."""
_attr_has_entity_name = True
_attr_supported_features = (
AlarmControlPanelEntityFeature.ARM_HOME
| AlarmControlPanelEntityFeature.ARM_AWAY
)
_attr_code_arm_required = False
_attr_name = None
def __init__(self, panel: Panel, area_id: int, unique_id: str) -> None:
"""Initialise a Bosch Alarm control panel entity."""
self.panel = panel
self._area = panel.areas[area_id]
self._area_id = area_id
self._attr_unique_id = f"{unique_id}_area_{area_id}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, self._attr_unique_id)},
name=self._area.name,
manufacturer="Bosch Security Systems",
via_device=(
DOMAIN,
unique_id,
),
)
@property
def alarm_state(self) -> AlarmControlPanelState | None:
"""Return the state of the alarm."""
if self._area.is_triggered():
return AlarmControlPanelState.TRIGGERED
if self._area.is_disarmed():
return AlarmControlPanelState.DISARMED
if self._area.is_arming():
return AlarmControlPanelState.ARMING
if self._area.is_pending():
return AlarmControlPanelState.PENDING
if self._area.is_part_armed():
return AlarmControlPanelState.ARMED_HOME
if self._area.is_all_armed():
return AlarmControlPanelState.ARMED_AWAY
return None
async def async_alarm_disarm(self, code: str | None = None) -> None:
"""Disarm this panel."""
await self.panel.area_disarm(self._area_id)
async def async_alarm_arm_home(self, code: str | None = None) -> None:
"""Send arm home command."""
await self.panel.area_arm_part(self._area_id)
async def async_alarm_arm_away(self, code: str | None = None) -> None:
"""Send arm away command."""
await self.panel.area_arm_all(self._area_id)
@property
def available(self) -> bool:
"""Return True if entity is available."""
return self.panel.connection_status()
async def async_added_to_hass(self) -> None:
"""Run when entity attached to hass."""
await super().async_added_to_hass()
self._area.status_observer.attach(self.schedule_update_ha_state)
self.panel.connection_status_observer.attach(self.schedule_update_ha_state)
async def async_will_remove_from_hass(self) -> None:
"""Run when entity removed from hass."""
await super().async_will_remove_from_hass()
self._area.status_observer.detach(self.schedule_update_ha_state)
self.panel.connection_status_observer.detach(self.schedule_update_ha_state)

View File

@ -0,0 +1,165 @@
"""Config flow for Bosch Alarm integration."""
from __future__ import annotations
import asyncio
import logging
import ssl
from typing import Any
from bosch_alarm_mode2 import Panel
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import (
CONF_CODE,
CONF_HOST,
CONF_MODEL,
CONF_PASSWORD,
CONF_PORT,
)
import homeassistant.helpers.config_validation as cv
from .const import CONF_INSTALLER_CODE, CONF_USER_CODE, DOMAIN
_LOGGER = logging.getLogger(__name__)
STEP_USER_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST): str,
vol.Required(CONF_PORT, default=7700): cv.positive_int,
}
)
STEP_AUTH_DATA_SCHEMA_SOLUTION = vol.Schema(
{
vol.Required(CONF_USER_CODE): str,
}
)
STEP_AUTH_DATA_SCHEMA_AMAX = vol.Schema(
{
vol.Required(CONF_INSTALLER_CODE): str,
vol.Required(CONF_PASSWORD): str,
}
)
STEP_AUTH_DATA_SCHEMA_BG = vol.Schema(
{
vol.Required(CONF_PASSWORD): str,
}
)
STEP_INIT_DATA_SCHEMA = vol.Schema({vol.Optional(CONF_CODE): str})
async def try_connect(
data: dict[str, Any], load_selector: int = 0
) -> tuple[str, int | None]:
"""Validate the user input allows us to connect.
Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user.
"""
panel = Panel(
host=data[CONF_HOST],
port=data[CONF_PORT],
automation_code=data.get(CONF_PASSWORD),
installer_or_user_code=data.get(CONF_INSTALLER_CODE, data.get(CONF_USER_CODE)),
)
try:
await panel.connect(load_selector)
finally:
await panel.disconnect()
return (panel.model, panel.serial_number)
class BoschAlarmConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Bosch Alarm."""
def __init__(self) -> None:
"""Init config flow."""
self._data: dict[str, Any] = {}
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}
if user_input is not None:
try:
# Use load_selector = 0 to fetch the panel model without authentication.
(model, serial) = await try_connect(user_input, 0)
except (
OSError,
ConnectionRefusedError,
ssl.SSLError,
asyncio.exceptions.TimeoutError,
) as e:
_LOGGER.error("Connection Error: %s", e)
errors["base"] = "cannot_connect"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
self._data = user_input
self._data[CONF_MODEL] = model
return await self.async_step_auth()
return self.async_show_form(
step_id="user",
data_schema=self.add_suggested_values_to_schema(
STEP_USER_DATA_SCHEMA, user_input
),
errors=errors,
)
async def async_step_auth(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the auth step."""
errors: dict[str, str] = {}
# Each model variant requires a different authentication flow
if "Solution" in self._data[CONF_MODEL]:
schema = STEP_AUTH_DATA_SCHEMA_SOLUTION
elif "AMAX" in self._data[CONF_MODEL]:
schema = STEP_AUTH_DATA_SCHEMA_AMAX
else:
schema = STEP_AUTH_DATA_SCHEMA_BG
if user_input is not None:
self._data.update(user_input)
try:
(model, serial_number) = await try_connect(
self._data, Panel.LOAD_EXTENDED_INFO
)
except (PermissionError, ValueError) as e:
errors["base"] = "invalid_auth"
_LOGGER.error("Authentication Error: %s", e)
except (
OSError,
ConnectionRefusedError,
ssl.SSLError,
TimeoutError,
) as e:
_LOGGER.error("Connection Error: %s", e)
errors["base"] = "cannot_connect"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
if serial_number:
await self.async_set_unique_id(str(serial_number))
self._abort_if_unique_id_configured()
else:
self._async_abort_entries_match({CONF_HOST: self._data[CONF_HOST]})
return self.async_create_entry(title=f"Bosch {model}", data=self._data)
return self.async_show_form(
step_id="auth",
data_schema=self.add_suggested_values_to_schema(schema, user_input),
errors=errors,
)

View File

@ -0,0 +1,6 @@
"""Constants for the Bosch Alarm integration."""
DOMAIN = "bosch_alarm"
HISTORY_ATTR = "history"
CONF_INSTALLER_CODE = "installer_code"
CONF_USER_CODE = "user_code"

View File

@ -0,0 +1,11 @@
{
"domain": "bosch_alarm",
"name": "Bosch Alarm",
"codeowners": ["@mag1024", "@sanjay900"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/bosch_alarm",
"integration_type": "device",
"iot_class": "local_push",
"quality_scale": "bronze",
"requirements": ["bosch-alarm-mode2==0.4.3"]
}

Some files were not shown because too many files have changed in this diff.