Mirror of https://github.com/home-assistant/core.git, synced 2025-07-29 16:17:20 +00:00

Commit 573644459a: Merge branch 'dev' into edenhaus-ecovacs-quality-scale
.github/ISSUE_TEMPLATE/bug_report.yml (vendored): 1 line changed
@@ -1,5 +1,6 @@
 name: Report an issue with Home Assistant Core
 description: Report an issue with Home Assistant Core.
+type: Bug
 body:
   - type: markdown
     attributes:
.github/workflows/builder.yml (vendored): 38 lines changed
@@ -32,7 +32,7 @@ jobs:
           fetch-depth: 0

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
@@ -69,7 +69,7 @@ jobs:
         run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -

       - name: Upload translations
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: translations
           path: translations.tar.gz
@@ -94,7 +94,7 @@ jobs:

       - name: Download nightly wheels of frontend
         if: needs.init.outputs.channel == 'dev'
-        uses: dawidd6/action-download-artifact@v8
+        uses: dawidd6/action-download-artifact@v9
         with:
           github_token: ${{secrets.GITHUB_TOKEN}}
           repo: home-assistant/frontend
@@ -105,7 +105,7 @@ jobs:

       - name: Download nightly wheels of intents
         if: needs.init.outputs.channel == 'dev'
-        uses: dawidd6/action-download-artifact@v8
+        uses: dawidd6/action-download-artifact@v9
         with:
           github_token: ${{secrets.GITHUB_TOKEN}}
           repo: home-assistant/intents-package
@@ -116,7 +116,7 @@ jobs:

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         if: needs.init.outputs.channel == 'dev'
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
@@ -175,7 +175,7 @@ jobs:
           sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt

       - name: Download translations
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.3.0
         with:
           name: translations
@@ -190,14 +190,14 @@ jobs:
           echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE

       - name: Login to GitHub Container Registry
-        uses: docker/login-action@v3.3.0
+        uses: docker/login-action@v3.4.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}

       - name: Build base image
-        uses: home-assistant/builder@2025.02.0
+        uses: home-assistant/builder@2025.03.0
         with:
           args: |
             $BUILD_ARGS \
@@ -256,14 +256,14 @@ jobs:
           fi

       - name: Login to GitHub Container Registry
-        uses: docker/login-action@v3.3.0
+        uses: docker/login-action@v3.4.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}

       - name: Build base image
-        uses: home-assistant/builder@2025.02.0
+        uses: home-assistant/builder@2025.03.0
         with:
           args: |
             $BUILD_ARGS \
@@ -324,20 +324,20 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Install Cosign
-        uses: sigstore/cosign-installer@v3.8.1
+        uses: sigstore/cosign-installer@v3.8.2
         with:
           cosign-release: "v2.2.3"

       - name: Login to DockerHub
         if: matrix.registry == 'docker.io/homeassistant'
-        uses: docker/login-action@v3.3.0
+        uses: docker/login-action@v3.4.0
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

       - name: Login to GitHub Container Registry
         if: matrix.registry == 'ghcr.io/home-assistant'
-        uses: docker/login-action@v3.3.0
+        uses: docker/login-action@v3.4.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
@@ -457,12 +457,12 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}

       - name: Download translations
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.3.0
         with:
           name: translations
@@ -502,14 +502,14 @@ jobs:
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

       - name: Login to GitHub Container Registry
-        uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
+        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}

       - name: Build Docker image
-        uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
+        uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0
         with:
           context: . # So action will not pull the repository again
           file: ./script/hassfest/docker/Dockerfile
@@ -522,7 +522,7 @@ jobs:
       - name: Push Docker image
         if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
         id: push
-        uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
+        uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0
         with:
           context: . # So action will not pull the repository again
           file: ./script/hassfest/docker/Dockerfile
@@ -531,7 +531,7 @@ jobs:

       - name: Generate artifact attestation
         if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
-        uses: actions/attest-build-provenance@f9eaf234fc1c2e333c1eca18177db0f44fa6ba52 # v2.2.1
+        uses: actions/attest-build-provenance@db473fddc028af60658334401dc6fa3ffd8669fd # v2.3.0
         with:
           subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
           subject-digest: ${{ steps.push.outputs.digest }}
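Two pinning styles appear in this file: most steps pin actions to a version tag, while the hassfest image job pins to a full commit SHA and records the tag in a trailing comment. The SHA pin is the stricter convention, since a tag can be re-pointed after release; a minimal sketch of the pattern, taken from the login step above:

      - name: Login to GitHub Container Registry
        # The 40-character SHA pins the exact action commit; the comment
        # documents which released tag that commit corresponds to.
        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0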
.github/workflows/ci.yaml (vendored): 321 lines changed
@@ -37,10 +37,10 @@ on:
       type: boolean

 env:
-  CACHE_VERSION: 11
+  CACHE_VERSION: 2
   UV_CACHE_VERSION: 1
-  MYPY_CACHE_VERSION: 9
-  HA_SHORT_VERSION: "2025.4"
+  MYPY_CACHE_VERSION: 1
+  HA_SHORT_VERSION: "2025.6"
   DEFAULT_PYTHON: "3.13"
   ALL_PYTHON_VERSIONS: "['3.13']"
   # 10.3 is the oldest supported version
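CACHE_VERSION and MYPY_CACHE_VERSION act as manual cache-busting tokens: they are interpolated into the actions/cache keys used later in this workflow, so changing the number makes every previously saved cache miss. A minimal sketch of the mechanism, with a hypothetical venv cache:

    env:
      CACHE_VERSION: 2  # bump (or reset) to force a cold cache on the next run
    jobs:
      example:
        runs-on: ubuntu-24.04
        steps:
          - uses: actions/cache@v4.2.3
            with:
              path: venv
              # A new CACHE_VERSION yields a new key, so no stale cache matches.
              key: ${{ runner.os }}-venv-${{ env.CACHE_VERSION }}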
@@ -89,6 +89,7 @@ jobs:
       test_groups: ${{ steps.info.outputs.test_groups }}
       tests_glob: ${{ steps.info.outputs.tests_glob }}
       tests: ${{ steps.info.outputs.tests }}
+      lint_only: ${{ steps.info.outputs.lint_only }}
       skip_coverage: ${{ steps.info.outputs.skip_coverage }}
     runs-on: ubuntu-24.04
     steps:
@@ -142,6 +143,7 @@ jobs:
           test_group_count=10
           tests="[]"
           tests_glob=""
+          lint_only=""
           skip_coverage=""

           if [[ "${{ steps.integrations.outputs.changes }}" != "[]" ]];
@@ -192,6 +194,17 @@ jobs:
             test_full_suite="true"
           fi

+          if [[ "${{ github.event.inputs.lint-only }}" == "true" ]] \
+            || [[ "${{ github.event.inputs.pylint-only }}" == "true" ]] \
+            || [[ "${{ github.event.inputs.mypy-only }}" == "true" ]] \
+            || [[ "${{ github.event.inputs.audit-licenses-only }}" == "true" ]] \
+            || [[ "${{ github.event_name }}" == "push" \
+              && "${{ github.event.repository.full_name }}" != "home-assistant/core" ]];
+          then
+            lint_only="true"
+            skip_coverage="true"
+          fi
+
           if [[ "${{ github.event.inputs.skip-coverage }}" == "true" ]] \
             || [[ "${{ contains(github.event.pull_request.labels.*.name, 'ci-skip-coverage') }}" == "true" ]];
           then
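This new block folds the four workflow-dispatch inputs and the fork-push check into a single lint_only flag that the info job exports (the `lint_only: ${{ steps.info.outputs.lint_only }}` output added above). The hunks further down then replace each test job's five-line `if:` expression with one check of that output, for example:

    if: |
      needs.info.outputs.lint_only != 'true'
      && needs.info.outputs.test_full_suite == 'true'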
@@ -217,6 +230,8 @@ jobs:
           echo "tests=${tests}" >> $GITHUB_OUTPUT
           echo "tests_glob: ${tests_glob}"
           echo "tests_glob=${tests_glob}" >> $GITHUB_OUTPUT
+          echo "lint_only: ${lint_only}"
+          echo "lint_only=${lint_only}" >> $GITHUB_OUTPUT
           echo "skip_coverage: ${skip_coverage}"
           echo "skip_coverage=${skip_coverage}" >> $GITHUB_OUTPUT
@@ -234,17 +249,17 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.1
+        uses: actions/cache@v4.2.3
         with:
           path: venv
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-venv-${{
             needs.info.outputs.pre-commit_cache_key }}
       - name: Create Python virtual environment
         if: steps.cache-venv.outputs.cache-hit != 'true'
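Adding ${{ runner.arch }} to the cache key scopes each cache to a single runner architecture, so runners of different architectures (for example x64 and arm64) no longer restore each other's compiled virtual environments. The same key change repeats for every cache step in the rest of this file; the resulting key shape is:

          key: >-
            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-venv-${{
            needs.info.outputs.pre-commit_cache_key }}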
@@ -256,12 +271,12 @@ jobs:
           uv pip install "$(cat requirements_test.txt | grep pre-commit)"
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v4.2.1
+        uses: actions/cache@v4.2.3
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           lookup-only: true
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.pre-commit_cache_key }}
       - name: Install pre-commit dependencies
         if: steps.cache-precommit.outputs.cache-hit != 'true'
@@ -279,28 +294,28 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.6.0
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-venv-${{
             needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.pre-commit_cache_key }}
       - name: Run ruff-format
         run: |
@@ -319,28 +334,28 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.6.0
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-venv-${{
             needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.pre-commit_cache_key }}
       - name: Run ruff
         run: |
@@ -359,28 +374,28 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.6.0
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-venv-${{
             needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.pre-commit_cache_key }}

       - name: Register yamllint problem matcher
@@ -469,7 +484,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
@@ -482,22 +497,22 @@ jobs:
             env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.1
+        uses: actions/cache@v4.2.3
         with:
           path: venv
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.python_cache_key }}
       - name: Restore uv wheel cache
         if: steps.cache-venv.outputs.cache-hit != 'true'
-        uses: actions/cache@v4.2.1
+        uses: actions/cache@v4.2.3
         with:
           path: ${{ env.UV_CACHE_DIR }}
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             steps.generate-uv-key.outputs.key }}
           restore-keys: |
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-uv-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-uv-${{
             env.UV_CACHE_VERSION }}-${{ steps.generate-uv-key.outputs.version }}-${{
             env.HA_SHORT_VERSION }}-
       - name: Install additional OS dependencies
@@ -537,7 +552,7 @@ jobs:
           python --version
           uv pip freeze >> pip_freeze.txt
       - name: Upload pip_freeze artifact
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: pip-freeze-${{ matrix.python-version }}
           path: pip_freeze.txt
@@ -572,18 +587,18 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.python_cache_key }}
       - name: Run hassfest
         run: |
@@ -605,24 +620,43 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.python_cache_key }}
       - name: Run gen_requirements_all.py
         run: |
           . venv/bin/activate
           python -m script.gen_requirements_all validate

+  dependency-review:
+    name: Dependency review
+    runs-on: ubuntu-24.04
+    needs:
+      - info
+      - base
+    if: |
+      github.event.inputs.pylint-only != 'true'
+      && github.event.inputs.mypy-only != 'true'
+      && needs.info.outputs.requirements == 'true'
+      && github.event_name == 'pull_request'
+    steps:
+      - name: Check out code from GitHub
+        uses: actions/checkout@v4.2.2
+      - name: Dependency review
+        uses: actions/dependency-review-action@v4.7.1
+        with:
+          license-check: false # We use our own license audit checks
+
   audit-licenses:
     name: Audit licenses
     runs-on: ubuntu-24.04
@@ -643,25 +677,25 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.python_cache_key }}
       - name: Extract license data
         run: |
           . venv/bin/activate
           python -m script.licenses extract --output-file=licenses-${{ matrix.python-version }}.json
       - name: Upload licenses
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: licenses-${{ github.run_number }}-${{ matrix.python-version }}
           path: licenses-${{ matrix.python-version }}.json
@@ -686,18 +720,18 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.python_cache_key }}
       - name: Register pylint problem matcher
         run: |
@@ -733,18 +767,18 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.python_cache_key }}
       - name: Register pylint problem matcher
         run: |
@@ -778,7 +812,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
@@ -791,22 +825,22 @@ jobs:
             env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.python_cache_key }}
       - name: Restore mypy cache
-        uses: actions/cache@v4.2.1
+        uses: actions/cache@v4.2.3
         with:
           path: .mypy_cache
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             steps.generate-mypy-key.outputs.key }}
           restore-keys: |
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-mypy-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-mypy-${{
             env.MYPY_CACHE_VERSION }}-${{ steps.generate-mypy-key.outputs.version }}-${{
             env.HA_SHORT_VERSION }}-
       - name: Register mypy problem matcher
@@ -829,11 +863,7 @@ jobs:
   prepare-pytest-full:
     runs-on: ubuntu-24.04
     if: |
-      (github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
-      && github.event.inputs.lint-only != 'true'
-      && github.event.inputs.pylint-only != 'true'
-      && github.event.inputs.mypy-only != 'true'
-      && github.event.inputs.audit-licenses-only != 'true'
+      needs.info.outputs.lint_only != 'true'
       && needs.info.outputs.test_full_suite == 'true'
     needs:
       - info
@@ -859,25 +889,25 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.python_cache_key }}
       - name: Run split_tests.py
         run: |
           . venv/bin/activate
           python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests
       - name: Upload pytest_buckets
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: pytest_buckets
           path: pytest_buckets.txt
@@ -886,11 +916,7 @@ jobs:
   pytest-full:
     runs-on: ubuntu-24.04
     if: |
-      (github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
-      && github.event.inputs.lint-only != 'true'
-      && github.event.inputs.pylint-only != 'true'
-      && github.event.inputs.mypy-only != 'true'
-      && github.event.inputs.audit-licenses-only != 'true'
+      needs.info.outputs.lint_only != 'true'
       && needs.info.outputs.test_full_suite == 'true'
     needs:
       - info
@@ -918,22 +944,24 @@ jobs:
           bluez \
           ffmpeg \
           libturbojpeg \
-          libgammu-dev
+          libgammu-dev \
+          libxml2-utils
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
-          key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+          key: >-
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.python_cache_key }}
       - name: Register Python problem matcher
         run: |
@@ -942,7 +970,7 @@ jobs:
         run: |
           echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
       - name: Download pytest_buckets
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.3.0
         with:
           name: pytest_buckets
       - name: Compile English translations
@@ -962,6 +990,7 @@ jobs:
           if [[ "${{ needs.info.outputs.skip_coverage }}" != "true" ]]; then
             cov_params+=(--cov="homeassistant")
            cov_params+=(--cov-report=xml)
+            cov_params+=(--junitxml=junit.xml -o junit_family=legacy)
           fi

           echo "Test group ${{ matrix.group }}: $(sed -n "${{ matrix.group }},1p" pytest_buckets.txt)"
@@ -980,18 +1009,30 @@ jobs:
           2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-full.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
           path: pytest-*.txt
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
           path: coverage.xml
           overwrite: true
+      - name: Beautify test results
+        # For easier identification of parsing errors
+        if: needs.info.outputs.skip_coverage != 'true'
+        run: |
+          xmllint --format "junit.xml" > "junit.xml-tmp"
+          mv "junit.xml-tmp" "junit.xml"
+      - name: Upload test results artifact
+        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
+        uses: actions/upload-artifact@v4.6.2
+        with:
+          name: test-results-full-${{ matrix.python-version }}-${{ matrix.group }}
+          path: junit.xml
       - name: Remove pytest_buckets
         run: rm pytest_buckets.txt
       - name: Check dirty
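The pytest invocation now also writes junit.xml (via the --junitxml=junit.xml -o junit_family=legacy parameters added above), and the two new steps pretty-print and upload it; libxml2-utils joins the apt installs in this file because it ships the xmllint binary used here. The beautify step in isolation, assuming junit.xml is present in the workspace:

      - name: Beautify test results
        # xmllint (from libxml2-utils) re-indents the XML so parsing errors
        # are easier to locate; writing to a temp file first avoids
        # truncating the input while it is still being read.
        if: needs.info.outputs.skip_coverage != 'true'
        run: |
          xmllint --format "junit.xml" > "junit.xml-tmp"
          mv "junit.xml-tmp" "junit.xml"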
@@ -1009,11 +1050,7 @@ jobs:
       MYSQL_ROOT_PASSWORD: password
     options: --health-cmd="mysqladmin ping -uroot -ppassword" --health-interval=5s --health-timeout=2s --health-retries=3
     if: |
-      (github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
-      && github.event.inputs.lint-only != 'true'
-      && github.event.inputs.pylint-only != 'true'
-      && github.event.inputs.mypy-only != 'true'
-      && github.event.inputs.audit-licenses-only != 'true'
+      needs.info.outputs.lint_only != 'true'
       && needs.info.outputs.mariadb_groups != '[]'
     needs:
       - info
@@ -1040,22 +1077,24 @@ jobs:
           bluez \
           ffmpeg \
           libturbojpeg \
-          libmariadb-dev-compat
+          libmariadb-dev-compat \
+          libxml2-utils
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
-          key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+          key: >-
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.python_cache_key }}
       - name: Register Python problem matcher
         run: |
@@ -1088,6 +1127,7 @@ jobs:
             cov_params+=(--cov="homeassistant.components.recorder")
             cov_params+=(--cov-report=xml)
             cov_params+=(--cov-report=term-missing)
+            cov_params+=(--junitxml=junit.xml -o junit_family=legacy)
           fi

           python3 -b -X dev -m pytest \
@@ -1108,7 +1148,7 @@ jobs:
           2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.mariadb }}
@@ -1116,12 +1156,25 @@ jobs:
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: coverage-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.mariadb }}
           path: coverage.xml
           overwrite: true
+      - name: Beautify test results
+        # For easier identification of parsing errors
+        if: needs.info.outputs.skip_coverage != 'true'
+        run: |
+          xmllint --format "junit.xml" > "junit.xml-tmp"
+          mv "junit.xml-tmp" "junit.xml"
+      - name: Upload test results artifact
+        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
+        uses: actions/upload-artifact@v4.6.2
+        with:
+          name: test-results-mariadb-${{ matrix.python-version }}-${{
+            steps.pytest-partial.outputs.mariadb }}
+          path: junit.xml
       - name: Check dirty
         run: |
           ./script/check_dirty
@@ -1137,11 +1190,7 @@ jobs:
       POSTGRES_PASSWORD: password
     options: --health-cmd="pg_isready -hlocalhost -Upostgres" --health-interval=5s --health-timeout=2s --health-retries=3
     if: |
-      (github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
-      && github.event.inputs.lint-only != 'true'
-      && github.event.inputs.pylint-only != 'true'
-      && github.event.inputs.mypy-only != 'true'
-      && github.event.inputs.audit-licenses-only != 'true'
+      needs.info.outputs.lint_only != 'true'
       && needs.info.outputs.postgresql_groups != '[]'
     needs:
       - info
@@ -1167,7 +1216,8 @@ jobs:
         sudo apt-get -y install \
           bluez \
           ffmpeg \
-          libturbojpeg
+          libturbojpeg \
+          libxml2-utils
         sudo /usr/share/postgresql-common/pgdg/apt.postgresql.org.sh -y
         sudo apt-get -y install \
           postgresql-server-dev-14
@@ -1175,17 +1225,18 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
-          key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+          key: >-
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.python_cache_key }}
       - name: Register Python problem matcher
         run: |
@@ -1218,6 +1269,7 @@ jobs:
             cov_params+=(--cov="homeassistant.components.recorder")
             cov_params+=(--cov-report=xml)
             cov_params+=(--cov-report=term-missing)
+            cov_params+=(--junitxml=junit.xml -o junit_family=legacy)
           fi

           python3 -b -X dev -m pytest \
@@ -1239,7 +1291,7 @@ jobs:
           2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.postgresql }}
@@ -1247,12 +1299,25 @@ jobs:
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: coverage-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.postgresql }}
           path: coverage.xml
           overwrite: true
+      - name: Beautify test results
+        # For easier identification of parsing errors
+        if: needs.info.outputs.skip_coverage != 'true'
+        run: |
+          xmllint --format "junit.xml" > "junit.xml-tmp"
+          mv "junit.xml-tmp" "junit.xml"
+      - name: Upload test results artifact
+        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
+        uses: actions/upload-artifact@v4.6.2
+        with:
+          name: test-results-postgres-${{ matrix.python-version }}-${{
+            steps.pytest-partial.outputs.postgresql }}
+          path: junit.xml
       - name: Check dirty
         run: |
           ./script/check_dirty
@@ -1271,12 +1336,12 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.3.0
         with:
           pattern: coverage-*
       - name: Upload coverage to Codecov
         if: needs.info.outputs.test_full_suite == 'true'
-        uses: codecov/codecov-action@v5.4.0
+        uses: codecov/codecov-action@v5.4.3
         with:
           fail_ci_if_error: true
           flags: full-suite
@@ -1285,11 +1350,7 @@ jobs:
   pytest-partial:
     runs-on: ubuntu-24.04
     if: |
-      (github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
-      && github.event.inputs.lint-only != 'true'
-      && github.event.inputs.pylint-only != 'true'
-      && github.event.inputs.mypy-only != 'true'
-      && github.event.inputs.audit-licenses-only != 'true'
+      needs.info.outputs.lint_only != 'true'
       && needs.info.outputs.tests_glob
       && needs.info.outputs.test_full_suite == 'false'
     needs:
@@ -1317,22 +1378,24 @@ jobs:
           bluez \
           ffmpeg \
           libturbojpeg \
-          libgammu-dev
+          libgammu-dev \
+          libxml2-utils
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
-          key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+          key: >-
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.python_cache_key }}
       - name: Register Python problem matcher
         run: |
@@ -1365,6 +1428,7 @@ jobs:
             cov_params+=(--cov="homeassistant.components.${{ matrix.group }}")
             cov_params+=(--cov-report=xml)
             cov_params+=(--cov-report=term-missing)
+            cov_params+=(--junitxml=junit.xml -o junit_family=legacy)
           fi

           python3 -b -X dev -m pytest \
@@ -1382,18 +1446,30 @@ jobs:
           2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
           path: pytest-*.txt
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
           path: coverage.xml
           overwrite: true
+      - name: Beautify test results
+        # For easier identification of parsing errors
+        if: needs.info.outputs.skip_coverage != 'true'
+        run: |
+          xmllint --format "junit.xml" > "junit.xml-tmp"
+          mv "junit.xml-tmp" "junit.xml"
+      - name: Upload test results artifact
+        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
+        uses: actions/upload-artifact@v4.6.2
+        with:
+          name: test-results-partial-${{ matrix.python-version }}-${{ matrix.group }}
+          path: junit.xml
       - name: Check dirty
         run: |
           ./script/check_dirty
@@ -1410,12 +1486,37 @@ jobs:
      - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.3.0
         with:
           pattern: coverage-*
       - name: Upload coverage to Codecov
         if: needs.info.outputs.test_full_suite == 'false'
-        uses: codecov/codecov-action@v5.4.0
+        uses: codecov/codecov-action@v5.4.3
         with:
           fail_ci_if_error: true
           token: ${{ secrets.CODECOV_TOKEN }}
+
+  upload-test-results:
+    name: Upload test results to Codecov
+    # codecov/test-results-action currently doesn't support tokenless uploads
+    # therefore we can't run it on forks
+    if: ${{ (github.event_name != 'pull_request' || !github.event.pull_request.head.repo.fork) && needs.info.outputs.skip_coverage != 'true' && !cancelled() }}
+    runs-on: ubuntu-24.04
+    needs:
+      - info
+      - pytest-partial
+      - pytest-full
+      - pytest-postgres
+      - pytest-mariadb
+    timeout-minutes: 10
+    steps:
+      - name: Download all coverage artifacts
+        uses: actions/download-artifact@v4.3.0
+        with:
+          pattern: test-results-*
+      - name: Upload test results to Codecov
+        uses: codecov/test-results-action@v1
+        with:
+          fail_ci_if_error: true
+          verbose: true
+          token: ${{ secrets.CODECOV_TOKEN }}
.github/workflows/codeql.yml (vendored): 4 lines changed
@@ -24,11 +24,11 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3.28.10
+        uses: github/codeql-action/init@v3.28.18
         with:
           languages: python

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3.28.10
+        uses: github/codeql-action/analyze@v3.28.18
         with:
           category: "/language:python"
.github/workflows/translations.yml (vendored): 2 lines changed
@@ -22,7 +22,7 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
.github/workflows/wheels.yml (vendored): 68 lines changed
@@ -36,7 +36,7 @@ jobs:

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
@@ -91,7 +91,7 @@ jobs:
           ) > build_constraints.txt

       - name: Upload env_file
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: env_file
           path: ./.env_file
@@ -99,14 +99,14 @@ jobs:
           overwrite: true

       - name: Upload build_constraints
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: build_constraints
           path: ./build_constraints.txt
           overwrite: true

       - name: Upload requirements_diff
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: requirements_diff
           path: ./requirements_diff.txt
@@ -118,7 +118,7 @@ jobs:
           python -m script.gen_requirements_all ci

       - name: Upload requirements_all_wheels
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: requirements_all_wheels
           path: ./requirements_all_wheels_*.txt
@@ -138,17 +138,17 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Download env_file
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.3.0
         with:
           name: env_file

       - name: Download build_constraints
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.3.0
         with:
           name: build_constraints

       - name: Download requirements_diff
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.3.0
         with:
           name: requirements_diff

@@ -159,7 +159,7 @@ jobs:
           sed -i "/uv/d" requirements_diff.txt

       - name: Build wheels
-        uses: home-assistant/wheels@2024.11.0
+        uses: home-assistant/wheels@2025.03.0
         with:
           abi: ${{ matrix.abi }}
           tag: musllinux_1_2
@@ -187,22 +187,22 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Download env_file
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.3.0
         with:
           name: env_file

       - name: Download build_constraints
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.3.0
         with:
           name: build_constraints

       - name: Download requirements_diff
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.3.0
         with:
           name: requirements_diff

       - name: Download requirements_all_wheels
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.3.0
         with:
           name: requirements_all_wheels

@@ -218,16 +218,8 @@ jobs:
           sed -i "/uv/d" requirements.txt
           sed -i "/uv/d" requirements_diff.txt

-      - name: Split requirements all
-        run: |
-          # We split requirements all into multiple files.
-          # This is to prevent the build from running out of memory when
-          # resolving packages on 32-bits systems (like armhf, armv7).
-
-          split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all_wheels_${{ matrix.arch }}.txt requirements_all.txt
-
-      - name: Build wheels (part 1)
-        uses: home-assistant/wheels@2024.11.0
+      - name: Build wheels
+        uses: home-assistant/wheels@2025.03.0
         with:
           abi: ${{ matrix.abi }}
           tag: musllinux_1_2
@@ -238,32 +230,4 @@ jobs:
           skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
           constraints: "homeassistant/package_constraints.txt"
           requirements-diff: "requirements_diff.txt"
-          requirements: "requirements_all.txtaa"
-
-      - name: Build wheels (part 2)
-        uses: home-assistant/wheels@2024.11.0
-        with:
-          abi: ${{ matrix.abi }}
-          tag: musllinux_1_2
-          arch: ${{ matrix.arch }}
-          wheels-key: ${{ secrets.WHEELS_KEY }}
-          env-file: true
-          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
-          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
-          constraints: "homeassistant/package_constraints.txt"
-          requirements-diff: "requirements_diff.txt"
-          requirements: "requirements_all.txtab"
-
-      - name: Build wheels (part 3)
-        uses: home-assistant/wheels@2024.11.0
-        with:
-          abi: ${{ matrix.abi }}
-          tag: musllinux_1_2
-          arch: ${{ matrix.arch }}
-          wheels-key: ${{ secrets.WHEELS_KEY }}
-          env-file: true
-          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
-          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
-          constraints: "homeassistant/package_constraints.txt"
-          requirements-diff: "requirements_diff.txt"
-          requirements: "requirements_all.txtac"
+          requirements: "requirements_all.txt"
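The deleted steps were a memory workaround: requirements_all.txt was split into three roughly equal chunks (requirements_all.txtaa/ab/ac) so that each wheel-builder invocation resolved only a third of the packages on 32-bit targets such as armhf and armv7. With the bump to home-assistant/wheels@2025.03.0 the workflow now passes the single requirements_all.txt in one build step, which suggests the newer builder copes with the full list on its own. The removed splitting one-liner, for reference:

          # split(1) usage is: split -l <lines> <input> <prefix>, producing
          # requirements_all.txtaa, requirements_all.txtab, requirements_all.txtac
          split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all_wheels_${{ matrix.arch }}.txt requirements_all.txt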
.gitignore (vendored): 1 line changed
@@ -69,6 +69,7 @@ test-reports/
 test-results.xml
 test-output.xml
 pytest-*.txt
+junit.xml

 # Translations
 *.mo
.pre-commit-config.yaml

@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.9.7
+    rev: v0.11.0
     hooks:
       - id: ruff
        args:
.strict-typing

@@ -119,6 +119,7 @@ homeassistant.components.bluetooth_adapters.*
 homeassistant.components.bluetooth_tracker.*
 homeassistant.components.bmw_connected_drive.*
 homeassistant.components.bond.*
+homeassistant.components.bosch_alarm.*
 homeassistant.components.braviatv.*
 homeassistant.components.bring.*
 homeassistant.components.brother.*
@@ -136,6 +137,7 @@ homeassistant.components.clicksend.*
 homeassistant.components.climate.*
 homeassistant.components.cloud.*
 homeassistant.components.co2signal.*
+homeassistant.components.comelit.*
 homeassistant.components.command_line.*
 homeassistant.components.config.*
 homeassistant.components.configurator.*
@@ -268,6 +270,7 @@ homeassistant.components.image_processing.*
 homeassistant.components.image_upload.*
 homeassistant.components.imap.*
 homeassistant.components.imgw_pib.*
+homeassistant.components.immich.*
 homeassistant.components.incomfort.*
 homeassistant.components.input_button.*
 homeassistant.components.input_select.*
@@ -289,6 +292,7 @@ homeassistant.components.kaleidescape.*
 homeassistant.components.knocki.*
 homeassistant.components.knx.*
 homeassistant.components.kraken.*
+homeassistant.components.kulersky.*
 homeassistant.components.lacrosse.*
 homeassistant.components.lacrosse_view.*
 homeassistant.components.lamarzocco.*
@@ -329,6 +333,7 @@ homeassistant.components.media_player.*
 homeassistant.components.media_source.*
 homeassistant.components.met_eireann.*
 homeassistant.components.metoffice.*
+homeassistant.components.miele.*
 homeassistant.components.mikrotik.*
 homeassistant.components.min_max.*
 homeassistant.components.minecraft_server.*
@@ -360,8 +365,10 @@ homeassistant.components.no_ip.*
 homeassistant.components.nordpool.*
 homeassistant.components.notify.*
 homeassistant.components.notion.*
+homeassistant.components.ntfy.*
 homeassistant.components.number.*
 homeassistant.components.nut.*
+homeassistant.components.ohme.*
 homeassistant.components.onboarding.*
 homeassistant.components.oncue.*
 homeassistant.components.onedrive.*
@@ -381,6 +388,7 @@ homeassistant.components.pandora.*
 homeassistant.components.panel_custom.*
 homeassistant.components.peblar.*
 homeassistant.components.peco.*
+homeassistant.components.pegel_online.*
 homeassistant.components.persistent_notification.*
 homeassistant.components.person.*
 homeassistant.components.pi_hole.*
@@ -396,6 +404,7 @@ homeassistant.components.pure_energie.*
 homeassistant.components.purpleair.*
 homeassistant.components.pushbullet.*
 homeassistant.components.pvoutput.*
+homeassistant.components.pyload.*
 homeassistant.components.python_script.*
 homeassistant.components.qbus.*
 homeassistant.components.qnap_qsw.*
@@ -410,6 +419,7 @@ homeassistant.components.recollect_waste.*
 homeassistant.components.recorder.*
 homeassistant.components.remember_the_milk.*
 homeassistant.components.remote.*
+homeassistant.components.remote_calendar.*
 homeassistant.components.renault.*
 homeassistant.components.reolink.*
 homeassistant.components.repairs.*
@@ -425,7 +435,6 @@ homeassistant.components.roku.*
 homeassistant.components.romy.*
 homeassistant.components.rpi_power.*
 homeassistant.components.rss_feed_template.*
-homeassistant.components.rtsp_to_webrtc.*
 homeassistant.components.russound_rio.*
 homeassistant.components.ruuvi_gateway.*
 homeassistant.components.ruuvitag_ble.*
|
||||
homeassistant.components.sleepiq.*
|
||||
homeassistant.components.smhi.*
|
||||
homeassistant.components.smlight.*
|
||||
homeassistant.components.smtp.*
|
||||
homeassistant.components.snooz.*
|
||||
homeassistant.components.solarlog.*
|
||||
homeassistant.components.sonarr.*
|
||||
@ -528,6 +538,7 @@ homeassistant.components.vallox.*
|
||||
homeassistant.components.valve.*
|
||||
homeassistant.components.velbus.*
|
||||
homeassistant.components.vlc_telnet.*
|
||||
homeassistant.components.vodafone_station.*
|
||||
homeassistant.components.wake_on_lan.*
|
||||
homeassistant.components.wake_word.*
|
||||
homeassistant.components.wallbox.*
|
||||
|
.vscode/tasks.json (vendored): 2 lines changed
@@ -4,7 +4,7 @@
     {
       "label": "Run Home Assistant Core",
       "type": "shell",
-      "command": "hass -c ./config",
+      "command": "${command:python.interpreterPath} -m homeassistant -c ./config",
       "group": "test",
       "presentation": {
         "reveal": "always",
CODEOWNERS (generated): 68 lines changed
@ -46,8 +46,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/accuweather/ @bieniu
|
||||
/homeassistant/components/acmeda/ @atmurray
|
||||
/tests/components/acmeda/ @atmurray
/homeassistant/components/adax/ @danielhiversen
/tests/components/adax/ @danielhiversen
/homeassistant/components/adax/ @danielhiversen @lazytarget
/tests/components/adax/ @danielhiversen @lazytarget
/homeassistant/components/adguard/ @frenck
/tests/components/adguard/ @frenck
/homeassistant/components/ads/ @mrpasztoradam
@ -171,6 +171,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/avea/ @pattyland
/homeassistant/components/awair/ @ahayworth @danielsjf
/tests/components/awair/ @ahayworth @danielsjf
/homeassistant/components/aws_s3/ @tomasbedrich
/tests/components/aws_s3/ @tomasbedrich
/homeassistant/components/axis/ @Kane610
/tests/components/axis/ @Kane610
/homeassistant/components/azure_data_explorer/ @kaareseras
@ -216,6 +218,8 @@ build.json @home-assistant/supervisor
/tests/components/bmw_connected_drive/ @gerard33 @rikroe
/homeassistant/components/bond/ @bdraco @prystupa @joshs85 @marciogranzotto
/tests/components/bond/ @bdraco @prystupa @joshs85 @marciogranzotto
/homeassistant/components/bosch_alarm/ @mag1024 @sanjay900
/tests/components/bosch_alarm/ @mag1024 @sanjay900
/homeassistant/components/bosch_shc/ @tschamm
/tests/components/bosch_shc/ @tschamm
/homeassistant/components/braviatv/ @bieniu @Drafteed
@ -430,7 +434,7 @@ build.json @home-assistant/supervisor
/homeassistant/components/entur_public_transport/ @hfurubotten
/homeassistant/components/environment_canada/ @gwww @michaeldavie
/tests/components/environment_canada/ @gwww @michaeldavie
/homeassistant/components/ephember/ @ttroy50
/homeassistant/components/ephember/ @ttroy50 @roberty99
/homeassistant/components/epic_games_store/ @hacf-fr @Quentame
/tests/components/epic_games_store/ @hacf-fr @Quentame
/homeassistant/components/epion/ @lhgravendeel
@ -451,8 +455,8 @@ build.json @home-assistant/supervisor
/tests/components/evil_genius_labs/ @balloob
/homeassistant/components/evohome/ @zxdavb
/tests/components/evohome/ @zxdavb
/homeassistant/components/ezviz/ @RenierM26 @baqs
/tests/components/ezviz/ @RenierM26 @baqs
/homeassistant/components/ezviz/ @RenierM26
/tests/components/ezviz/ @RenierM26
/homeassistant/components/faa_delays/ @ntilley905
/tests/components/faa_delays/ @ntilley905
/homeassistant/components/fan/ @home-assistant/core
@ -570,8 +574,8 @@ build.json @home-assistant/supervisor
/tests/components/google_cloud/ @lufton @tronikos
/homeassistant/components/google_drive/ @tronikos
/tests/components/google_drive/ @tronikos
/homeassistant/components/google_generative_ai_conversation/ @tronikos
/tests/components/google_generative_ai_conversation/ @tronikos
/homeassistant/components/google_generative_ai_conversation/ @tronikos @ivanlh
/tests/components/google_generative_ai_conversation/ @tronikos @ivanlh
/homeassistant/components/google_mail/ @tkdrob
/tests/components/google_mail/ @tkdrob
/homeassistant/components/google_photos/ @allenporter
@ -702,8 +706,12 @@ build.json @home-assistant/supervisor
/tests/components/image_upload/ @home-assistant/core
/homeassistant/components/imap/ @jbouwh
/tests/components/imap/ @jbouwh
/homeassistant/components/imeon_inverter/ @Imeon-Energy
/tests/components/imeon_inverter/ @Imeon-Energy
/homeassistant/components/imgw_pib/ @bieniu
/tests/components/imgw_pib/ @bieniu
/homeassistant/components/immich/ @mib1185
/tests/components/immich/ @mib1185
/homeassistant/components/improv_ble/ @emontnemery
/tests/components/improv_ble/ @emontnemery
/homeassistant/components/incomfort/ @jbouwh
@ -933,6 +941,8 @@ build.json @home-assistant/supervisor
/tests/components/metoffice/ @MrHarcombe @avee87
/homeassistant/components/microbees/ @microBeesTech
/tests/components/microbees/ @microBeesTech
/homeassistant/components/miele/ @astrandb
/tests/components/miele/ @astrandb
/homeassistant/components/mikrotik/ @engrbm87
/tests/components/mikrotik/ @engrbm87
/homeassistant/components/mill/ @danielhiversen
@ -1045,6 +1055,8 @@ build.json @home-assistant/supervisor
/tests/components/nsw_fuel_station/ @nickw444
/homeassistant/components/nsw_rural_fire_service_feed/ @exxamalte
/tests/components/nsw_rural_fire_service_feed/ @exxamalte
/homeassistant/components/ntfy/ @tr4nt0r
/tests/components/ntfy/ @tr4nt0r
/homeassistant/components/nuheat/ @tstabrawa
/tests/components/nuheat/ @tstabrawa
/homeassistant/components/nuki/ @pschmitt @pvizeli @pree
@ -1073,8 +1085,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/ombi/ @larssont
/homeassistant/components/onboarding/ @home-assistant/core
/tests/components/onboarding/ @home-assistant/core
/homeassistant/components/oncue/ @bdraco @peterager
/tests/components/oncue/ @bdraco @peterager
/homeassistant/components/ondilo_ico/ @JeromeHXP
/tests/components/ondilo_ico/ @JeromeHXP
/homeassistant/components/onedrive/ @zweckj
@ -1103,8 +1113,8 @@ build.json @home-assistant/supervisor
/tests/components/opentherm_gw/ @mvn23
/homeassistant/components/openuv/ @bachya
/tests/components/openuv/ @bachya
/homeassistant/components/openweathermap/ @fabaff @freekode @nzapponi
/tests/components/openweathermap/ @fabaff @freekode @nzapponi
/homeassistant/components/openweathermap/ @fabaff @freekode @nzapponi @wittypluck
/tests/components/openweathermap/ @fabaff @freekode @nzapponi @wittypluck
/homeassistant/components/opnsense/ @mtreinish
/tests/components/opnsense/ @mtreinish
/homeassistant/components/opower/ @tronikos
@ -1183,6 +1193,8 @@ build.json @home-assistant/supervisor
/tests/components/prusalink/ @balloob
/homeassistant/components/ps4/ @ktnrg45
/tests/components/ps4/ @ktnrg45
/homeassistant/components/pterodactyl/ @elmurato
/tests/components/pterodactyl/ @elmurato
/homeassistant/components/pure_energie/ @klaasnicolaas
/tests/components/pure_energie/ @klaasnicolaas
/homeassistant/components/purpleair/ @bachya
@ -1250,8 +1262,12 @@ build.json @home-assistant/supervisor
/tests/components/recovery_mode/ @home-assistant/core
/homeassistant/components/refoss/ @ashionky
/tests/components/refoss/ @ashionky
/homeassistant/components/rehlko/ @bdraco @peterager
/tests/components/rehlko/ @bdraco @peterager
/homeassistant/components/remote/ @home-assistant/core
/tests/components/remote/ @home-assistant/core
/homeassistant/components/remote_calendar/ @Thomas55555
/tests/components/remote_calendar/ @Thomas55555
/homeassistant/components/renault/ @epenet
/tests/components/renault/ @epenet
/homeassistant/components/renson/ @jimmyd-be
@ -1293,8 +1309,6 @@ build.json @home-assistant/supervisor
/tests/components/rpi_power/ @shenxn @swetoast
/homeassistant/components/rss_feed_template/ @home-assistant/core
/tests/components/rss_feed_template/ @home-assistant/core
/homeassistant/components/rtsp_to_webrtc/ @allenporter
/tests/components/rtsp_to_webrtc/ @allenporter
/homeassistant/components/ruckus_unleashed/ @lanrat @ms264556 @gabe565
/tests/components/ruckus_unleashed/ @lanrat @ms264556 @gabe565
/homeassistant/components/russound_rio/ @noahhusby
@ -1381,7 +1395,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/siren/ @home-assistant/core @raman325
/tests/components/siren/ @home-assistant/core @raman325
/homeassistant/components/sisyphus/ @jkeljo
/homeassistant/components/sky_hub/ @rogerselwyn
/homeassistant/components/sky_remote/ @dunnmj @saty9
/tests/components/sky_remote/ @dunnmj @saty9
/homeassistant/components/skybell/ @tkdrob
@ -1428,8 +1441,8 @@ build.json @home-assistant/supervisor
/tests/components/solarlog/ @Ernst79 @dontinelli
/homeassistant/components/solax/ @squishykid @Darsstar
/tests/components/solax/ @squishykid @Darsstar
/homeassistant/components/soma/ @ratsept @sebfortier2288
/tests/components/soma/ @ratsept @sebfortier2288
/homeassistant/components/soma/ @ratsept
/tests/components/soma/ @ratsept
/homeassistant/components/sonarr/ @ctalkington
/tests/components/sonarr/ @ctalkington
/homeassistant/components/songpal/ @rytilahti @shenxn
@ -1461,7 +1474,8 @@ build.json @home-assistant/supervisor
/tests/components/steam_online/ @tkdrob
/homeassistant/components/steamist/ @bdraco
/tests/components/steamist/ @bdraco
/homeassistant/components/stiebel_eltron/ @fucm
/homeassistant/components/stiebel_eltron/ @fucm @ThyMYthOS
/tests/components/stiebel_eltron/ @fucm @ThyMYthOS
/homeassistant/components/stookwijzer/ @fwestenberg
/tests/components/stookwijzer/ @fwestenberg
/homeassistant/components/stream/ @hunterjm @uvjustin @allenporter
@ -1472,10 +1486,8 @@ build.json @home-assistant/supervisor
/tests/components/subaru/ @G-Two
/homeassistant/components/suez_water/ @ooii @jb101010-2
/tests/components/suez_water/ @ooii @jb101010-2
/homeassistant/components/sun/ @Swamp-Ig
/tests/components/sun/ @Swamp-Ig
/homeassistant/components/sunweg/ @rokam
/tests/components/sunweg/ @rokam
/homeassistant/components/sun/ @home-assistant/core
/tests/components/sun/ @home-assistant/core
/homeassistant/components/supla/ @mwegrzynek
/homeassistant/components/surepetcare/ @benleb @danielhiversen
/tests/components/surepetcare/ @benleb @danielhiversen
@ -1488,8 +1500,8 @@ build.json @home-assistant/supervisor
/tests/components/switch_as_x/ @home-assistant/core
/homeassistant/components/switchbee/ @jafar-atili
/tests/components/switchbee/ @jafar-atili
/homeassistant/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski
/tests/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski
/homeassistant/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski @zerzhang
/tests/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski @zerzhang
/homeassistant/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur
/tests/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur
/homeassistant/components/switcher_kis/ @thecode @YogevBokobza
@ -1529,8 +1541,8 @@ build.json @home-assistant/supervisor
/tests/components/tedee/ @patrickhilker @zweckj
/homeassistant/components/tellduslive/ @fredrike
/tests/components/tellduslive/ @fredrike
/homeassistant/components/template/ @PhracturedBlue @home-assistant/core
/tests/components/template/ @PhracturedBlue @home-assistant/core
/homeassistant/components/template/ @Petro31 @PhracturedBlue @home-assistant/core
/tests/components/template/ @Petro31 @PhracturedBlue @home-assistant/core
/homeassistant/components/tesla_fleet/ @Bre77
/tests/components/tesla_fleet/ @Bre77
/homeassistant/components/tesla_wall_connector/ @einarhauks
@ -1666,8 +1678,8 @@ build.json @home-assistant/supervisor
/tests/components/vlc_telnet/ @rodripf @MartinHjelmare
/homeassistant/components/vodafone_station/ @paoloantinori @chemelli74
/tests/components/vodafone_station/ @paoloantinori @chemelli74
/homeassistant/components/voip/ @balloob @synesthesiam
/tests/components/voip/ @balloob @synesthesiam
/homeassistant/components/voip/ @balloob @synesthesiam @jaminh
/tests/components/voip/ @balloob @synesthesiam @jaminh
/homeassistant/components/volumio/ @OnFreund
/tests/components/volumio/ @OnFreund
/homeassistant/components/volvooncall/ @molobrakos
@ -1784,6 +1796,8 @@ build.json @home-assistant/supervisor
/tests/components/zeversolar/ @kvanzuijlen
/homeassistant/components/zha/ @dmulcahey @adminiuga @puddly @TheJulianJES
/tests/components/zha/ @dmulcahey @adminiuga @puddly @TheJulianJES
/homeassistant/components/zimi/ @markhannon
/tests/components/zimi/ @markhannon
/homeassistant/components/zodiac/ @JulienTant
/tests/components/zodiac/ @JulienTant
/homeassistant/components/zone/ @home-assistant/core
4
Dockerfile
generated
@ -25,13 +25,13 @@ RUN \
"armv7") go2rtc_suffix='arm' ;; \
*) go2rtc_suffix=${BUILD_ARCH} ;; \
esac \
&& curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.8/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
&& curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.9/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
&& chmod +x /bin/go2rtc \
# Verify go2rtc can be executed
&& go2rtc --version

# Install uv
RUN pip3 install uv==0.6.1
RUN pip3 install uv==0.7.1

WORKDIR /usr/src
12
build.yaml
@ -1,10 +1,10 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.02.1
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.02.1
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.02.1
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.02.1
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.02.1
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.05.0
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.05.0
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.05.0
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.05.0
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.05.0
codenotary:
signer: notary@home-assistant.io
base_image: notary@home-assistant.io
@ -19,4 +19,4 @@ labels:
org.opencontainers.image.authors: The Home Assistant Authors
org.opencontainers.image.url: https://www.home-assistant.io/
org.opencontainers.image.documentation: https://www.home-assistant.io/docs/
org.opencontainers.image.licenses: Apache License 2.0
org.opencontainers.image.licenses: Apache-2.0
@ -178,6 +178,15 @@ _BLOCKING_CALLS: tuple[BlockingCall, ...] = (
strict_core=False,
skip_for_tests=True,
),
BlockingCall(
original_func=SSLContext.set_default_verify_paths,
object=SSLContext,
function="set_default_verify_paths",
check_allowed=None,
strict=False,
strict_core=False,
skip_for_tests=True,
),
BlockingCall(
original_func=Path.open,
object=Path,
@ -53,6 +53,7 @@ from .components import (
logbook as logbook_pre_import, # noqa: F401
lovelace as lovelace_pre_import, # noqa: F401
onboarding as onboarding_pre_import, # noqa: F401
person as person_pre_import, # noqa: F401
recorder as recorder_import, # noqa: F401 - not named pre_import since it has requirements
repairs as repairs_pre_import, # noqa: F401
search as search_pre_import, # noqa: F401
@ -81,6 +82,7 @@ from .helpers import (
entity,
entity_registry,
floor_registry,
frame,
issue_registry,
label_registry,
recorder,
@ -92,6 +94,7 @@ from .helpers.dispatcher import async_dispatcher_send_internal
from .helpers.storage import get_internal_store_manager
from .helpers.system_info import async_get_system_info
from .helpers.typing import ConfigType
from .loader import Integration
from .setup import (
# _setup_started is marked as protected to make it clear
# that it is not part of the public API and should not be used
@ -298,14 +301,6 @@ async def async_setup_hass(

return hass

async def stop_hass(hass: core.HomeAssistant) -> None:
"""Stop hass."""
# Ask integrations to shut down. It's messy but we can't
# do a clean stop without knowing what is broken
with contextlib.suppress(TimeoutError):
async with hass.timeout.async_timeout(10):
await hass.async_stop()

hass = await create_hass()

if runtime_config.skip_pip or runtime_config.skip_pip_packages:
@ -344,7 +339,7 @@ async def async_setup_hass(

if config_dict is None:
recovery_mode = True
await stop_hass(hass)
await hass.async_stop(force=True)
hass = await create_hass()

elif not basic_setup_success:
@ -352,7 +347,7 @@ async def async_setup_hass(
"Unable to set up core integrations. Activating recovery mode"
)
recovery_mode = True
await stop_hass(hass)
await hass.async_stop(force=True)
hass = await create_hass()

elif any(
@ -367,7 +362,7 @@ async def async_setup_hass(
old_logging = hass.data.get(DATA_LOGGING)

recovery_mode = True
await stop_hass(hass)
await hass.async_stop(force=True)
hass = await create_hass()

if old_logging:
@ -441,9 +436,10 @@ async def async_load_base_functionality(hass: core.HomeAssistant) -> None:
if DATA_REGISTRIES_LOADED in hass.data:
return
hass.data[DATA_REGISTRIES_LOADED] = None
translation.async_setup(hass)
entity.async_setup(hass)
frame.async_setup(hass)
template.async_setup(hass)
translation.async_setup(hass)
await asyncio.gather(
create_eager_task(get_internal_store_manager(hass).async_initialize()),
create_eager_task(area_registry.async_load(hass)),
@ -664,11 +660,10 @@ def _create_log_file(
err_handler = _RotatingFileHandlerWithoutShouldRollOver(
err_log_path, backupCount=1
)

try:
err_handler.doRollover()
except OSError as err:
_LOGGER.error("Error rolling over log file: %s", err)
try:
err_handler.doRollover()
except OSError as err:
_LOGGER.error("Error rolling over log file: %s", err)

return err_handler

@ -718,20 +713,25 @@ def _get_domains(hass: core.HomeAssistant, config: dict[str, Any]) -> set[str]:
return domains


async def _async_resolve_domains_to_setup(
async def _async_resolve_domains_and_preload(
hass: core.HomeAssistant, config: dict[str, Any]
) -> tuple[set[str], dict[str, loader.Integration]]:
"""Resolve all dependencies and return list of domains to set up."""
) -> tuple[dict[str, Integration], dict[str, Integration]]:
"""Resolve all dependencies and return integrations to set up.

The return value is a tuple of two dictionaries:
- The first dictionary contains integrations
specified by the configuration (including config entries).
- The second dictionary contains the same integrations as the first dictionary
together with all their dependencies.
"""
domains_to_setup = _get_domains(hass, config)
needed_requirements: set[str] = set()
platform_integrations = conf_util.extract_platform_integrations(
config, BASE_PLATFORMS
)
# Ensure base platforms that have platform integrations are added to
# to `domains_to_setup so they can be setup first instead of
# discovering them when later when a config entry setup task
# notices its needed and there is already a long line to use
# the import executor.
# Ensure base platforms that have platform integrations are added to `domains`,
# so they can be setup first instead of discovering them later when a config
# entry setup task notices that it's needed and there is already a long line
# to use the import executor.
#
# For example if we have
# sensor:
@ -747,111 +747,78 @@ async def _async_resolve_domains_to_setup(
# so this will be less of a problem in the future.
domains_to_setup.update(platform_integrations)

# Load manifests for base platforms and platform based integrations
# that are defined under base platforms right away since we do not require
# the manifest to list them as dependencies and we want to avoid the lock
# contention when multiple integrations try to load them at once
additional_manifests_to_load = {
# Additionally process base platforms since we do not require the manifest
# to list them as dependencies.
# We want to later avoid lock contention when multiple integrations try to load
# their manifests at once.
# Also process integrations that are defined under base platforms
# to speed things up.
additional_domains_to_process = {
*BASE_PLATFORMS,
*chain.from_iterable(platform_integrations.values()),
}

translations_to_load = additional_manifests_to_load.copy()

# Resolve all dependencies so we know all integrations
# that will have to be loaded and start right-away
integration_cache: dict[str, loader.Integration] = {}
to_resolve: set[str] = domains_to_setup
while to_resolve or additional_manifests_to_load:
old_to_resolve: set[str] = to_resolve
to_resolve = set()
integrations_or_excs = await loader.async_get_integrations(
hass, {*domains_to_setup, *additional_domains_to_process}
)
# Eliminate those missing or with invalid manifest
integrations_to_process = {
domain: itg
for domain, itg in integrations_or_excs.items()
if isinstance(itg, Integration)
}
integrations_dependencies = await loader.resolve_integrations_dependencies(
hass, integrations_to_process.values()
)
# Eliminate those without valid dependencies
integrations_to_process = {
domain: integrations_to_process[domain] for domain in integrations_dependencies
}

if additional_manifests_to_load:
to_get = {*old_to_resolve, *additional_manifests_to_load}
additional_manifests_to_load.clear()
else:
to_get = old_to_resolve
integrations_to_setup = {
domain: itg
for domain, itg in integrations_to_process.items()
if domain in domains_to_setup
}
all_integrations_to_setup = integrations_to_setup.copy()
all_integrations_to_setup.update(
(dep, loader.async_get_loaded_integration(hass, dep))
for domain in integrations_to_setup
for dep in integrations_dependencies[domain].difference(
all_integrations_to_setup
)
)

manifest_deps: set[str] = set()
resolve_dependencies_tasks: list[asyncio.Task[bool]] = []
integrations_to_process: list[loader.Integration] = []

for domain, itg in (await loader.async_get_integrations(hass, to_get)).items():
if not isinstance(itg, loader.Integration):
continue
integration_cache[domain] = itg
needed_requirements.update(itg.requirements)

# Make sure manifests for dependencies are loaded in the next
# loop to try to group as many as manifest loads in a single
# call to avoid the creating one-off executor jobs later in
# the setup process
additional_manifests_to_load.update(
dep
for dep in chain(itg.dependencies, itg.after_dependencies)
if dep not in integration_cache
)

if domain not in old_to_resolve:
continue

integrations_to_process.append(itg)
manifest_deps.update(itg.dependencies)
manifest_deps.update(itg.after_dependencies)
if not itg.all_dependencies_resolved:
resolve_dependencies_tasks.append(
create_eager_task(
itg.resolve_dependencies(),
name=f"resolve dependencies {domain}",
loop=hass.loop,
)
)

if unseen_deps := manifest_deps - integration_cache.keys():
# If there are dependencies, try to preload all
# the integrations manifest at once and add them
# to the list of requirements we need to install
# so we can try to check if they are already installed
# in a single call below which avoids each integration
# having to wait for the lock to do it individually
deps = await loader.async_get_integrations(hass, unseen_deps)
for dependant_domain, dependant_itg in deps.items():
if isinstance(dependant_itg, loader.Integration):
integration_cache[dependant_domain] = dependant_itg
needed_requirements.update(dependant_itg.requirements)

if resolve_dependencies_tasks:
await asyncio.gather(*resolve_dependencies_tasks)

for itg in integrations_to_process:
try:
all_deps = itg.all_dependencies
except RuntimeError:
# Integration.all_dependencies raises RuntimeError if
# dependencies could not be resolved
continue
for dep in all_deps:
if dep in domains_to_setup:
continue
domains_to_setup.add(dep)
to_resolve.add(dep)

_LOGGER.info("Domains to be set up: %s", domains_to_setup)
# Gather requirements for all integrations,
# their dependencies and after dependencies.
# To gather all the requirements we must ignore exceptions here.
# The exceptions will be detected and handled later in the bootstrap process.
integrations_after_dependencies = (
await loader.resolve_integrations_after_dependencies(
hass, integrations_to_process.values(), ignore_exceptions=True
)
)
integrations_requirements = {
domain: itg.requirements for domain, itg in integrations_to_process.items()
}
integrations_requirements.update(
(dep, loader.async_get_loaded_integration(hass, dep).requirements)
for deps in integrations_after_dependencies.values()
for dep in deps.difference(integrations_requirements)
)
all_requirements = set(chain.from_iterable(integrations_requirements.values()))

# Optimistically check if requirements are already installed
# ahead of setting up the integrations so we can prime the cache
# We do not wait for this since its an optimization only
# We do not wait for this since it's an optimization only
hass.async_create_background_task(
requirements.async_load_installed_versions(hass, needed_requirements),
requirements.async_load_installed_versions(hass, all_requirements),
"check installed requirements",
eager_start=True,
)

#
# Only add the domains_to_setup after we finish resolving
# as new domains are likely to added in the process
#
translations_to_load.update(domains_to_setup)
# Start loading translations for all integrations we are going to set up
# in the background so they are ready when we need them. This avoids a
# lot of waiting for the translation load lock and a thundering herd of
@ -862,6 +829,7 @@ async def _async_resolve_domains_to_setup(
# hold the translation load lock and if anything is fast enough to
# wait for the translation load lock, loading will be done by the
# time it gets to it.
translations_to_load = {*all_integrations_to_setup, *additional_domains_to_process}
hass.async_create_background_task(
translation.async_load_integrations(hass, translations_to_load),
"load translations",
@ -873,13 +841,13 @@ async def _async_resolve_domains_to_setup(
# in the setup process.
hass.async_create_background_task(
get_internal_store_manager(hass).async_preload(
[*PRELOAD_STORAGE, *domains_to_setup]
[*PRELOAD_STORAGE, *all_integrations_to_setup]
),
"preload storage",
eager_start=True,
)

return domains_to_setup, integration_cache
return integrations_to_setup, all_integrations_to_setup

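The docstring above defines a two-dictionary contract that is easy to lose in the surrounding diff. A minimal self-contained sketch of the final "widen with dependencies" step, using illustrative names (widen_with_dependencies is not a Home Assistant API):

from typing import Any


def widen_with_dependencies(
    requested: dict[str, Any],
    dependencies: dict[str, set[str]],
    loaded: dict[str, Any],
) -> dict[str, Any]:
    """Return the requested integrations plus all of their dependencies."""
    result = dict(requested)
    for domain in requested:
        for dep in dependencies.get(domain, set()):
            # Dependencies were already loaded during resolution, so they
            # are looked up from a cache instead of being fetched again.
            result.setdefault(dep, loaded[dep])
    return result


# The first dict mirrors integrations_to_setup, the second mirrors the
# widened all_integrations_to_setup returned above.
requested = {"mqtt": "Integration(mqtt)"}
deps = {"mqtt": {"http"}}
loaded = {"http": "Integration(http)"}
assert widen_with_dependencies(requested, deps, loaded) == {
    "mqtt": "Integration(mqtt)",
    "http": "Integration(http)",
}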
async def _async_set_up_integrations(
@ -889,69 +857,84 @@ async def _async_set_up_integrations(
watcher = _WatchPendingSetups(hass, _setup_started(hass))
watcher.async_start()

domains_to_setup, integration_cache = await _async_resolve_domains_to_setup(
integrations, all_integrations = await _async_resolve_domains_and_preload(
hass, config
)
stage_2_domains = domains_to_setup.copy()
# Detect all cycles
integrations_after_dependencies = (
await loader.resolve_integrations_after_dependencies(
hass, all_integrations.values(), set(all_integrations)
)
)
all_domains = set(integrations_after_dependencies)
domains = set(integrations) & all_domains

_LOGGER.info(
"Domains to be set up: %s | %s",
domains,
all_domains - domains,
)

async_set_domains_to_be_loaded(hass, all_domains)

# Initialize recorder
if "recorder" in domains_to_setup:
if "recorder" in all_domains:
recorder.async_initialize_recorder(hass)

# Initialize backup
if "backup" in domains_to_setup:
if "backup" in all_domains:
backup.async_initialize_backup(hass)

stage_0_and_1_domains: list[tuple[str, set[str], int | None]] = [
stages: list[tuple[str, set[str], int | None]] = [
*(
(name, domain_group & domains_to_setup, timeout)
(name, domain_group, timeout)
for name, domain_group, timeout in STAGE_0_INTEGRATIONS
),
("stage 1", STAGE_1_INTEGRATIONS & domains_to_setup, STAGE_1_TIMEOUT),
("1", STAGE_1_INTEGRATIONS, STAGE_1_TIMEOUT),
("2", domains, STAGE_2_TIMEOUT),
]

_LOGGER.info("Setting up stage 0 and 1")
for name, domain_group, timeout in stage_0_and_1_domains:
if not domain_group:
_LOGGER.info("Setting up stage 0")
for name, domain_group, timeout in stages:
stage_domains_unfiltered = domain_group & all_domains
if not stage_domains_unfiltered:
_LOGGER.info("Nothing to set up in stage %s: %s", name, domain_group)
continue

_LOGGER.info("Setting up %s: %s", name, domain_group)
to_be_loaded = domain_group.copy()
to_be_loaded.update(
stage_domains = stage_domains_unfiltered - hass.config.components
if not stage_domains:
_LOGGER.info("Already set up stage %s: %s", name, stage_domains_unfiltered)
continue

stage_dep_domains_unfiltered = {
dep
for domain in domain_group
if (integration := integration_cache.get(domain)) is not None
for dep in integration.all_dependencies
for domain in stage_domains
for dep in integrations_after_dependencies[domain]
if dep not in stage_domains
}
stage_dep_domains = stage_dep_domains_unfiltered - hass.config.components

stage_all_domains = stage_domains | stage_dep_domains

_LOGGER.info(
"Setting up stage %s: %s | %s\nDependencies: %s | %s",
name,
stage_domains,
stage_domains_unfiltered - stage_domains,
stage_dep_domains,
stage_dep_domains_unfiltered - stage_dep_domains,
)
async_set_domains_to_be_loaded(hass, to_be_loaded)
stage_2_domains -= to_be_loaded

if timeout is None:
await _async_setup_multi_components(hass, domain_group, config)
else:
try:
async with hass.timeout.async_timeout(timeout, cool_down=COOLDOWN_TIME):
await _async_setup_multi_components(hass, domain_group, config)
except TimeoutError:
_LOGGER.warning(
"Setup timed out for %s waiting on %s - moving forward",
name,
hass._active_tasks, # noqa: SLF001
)

# Add after dependencies when setting up stage 2 domains
async_set_domains_to_be_loaded(hass, stage_2_domains)

if stage_2_domains:
_LOGGER.info("Setting up stage 2: %s", stage_2_domains)
await _async_setup_multi_components(hass, stage_all_domains, config)
continue
try:
async with hass.timeout.async_timeout(
STAGE_2_TIMEOUT, cool_down=COOLDOWN_TIME
):
await _async_setup_multi_components(hass, stage_2_domains, config)
async with hass.timeout.async_timeout(timeout, cool_down=COOLDOWN_TIME):
await _async_setup_multi_components(hass, stage_all_domains, config)
except TimeoutError:
_LOGGER.warning(
"Setup timed out for stage 2 waiting on %s - moving forward",
"Setup timed out for stage %s waiting on %s - moving forward",
name,
hass._active_tasks, # noqa: SLF001
)

@ -1053,8 +1036,6 @@ async def _async_setup_multi_components(
config: dict[str, Any],
) -> None:
"""Set up multiple domains. Log on failure."""
# Avoid creating tasks for domains that were setup in a previous stage
domains_not_yet_setup = domains - hass.config.components
# Create setup tasks for base platforms first since everything will have
# to wait to be imported, and the sooner we can get the base platforms
# loaded the sooner we can start loading the rest of the integrations.
@ -1064,9 +1045,7 @@ async def _async_setup_multi_components(
f"setup component {domain}",
eager_start=True,
)
for domain in sorted(
domains_not_yet_setup, key=SETUP_ORDER_SORT_KEY, reverse=True
)
for domain in sorted(domains, key=SETUP_ORDER_SORT_KEY, reverse=True)
}
results = await asyncio.gather(*futures.values(), return_exceptions=True)
for idx, domain in enumerate(futures):
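The staged-setup loop above is, at its core, three set operations per stage: intersect with everything that will load, drop what is already set up, then widen with not-yet-covered dependencies. A minimal self-contained sketch, with plan_stage as an illustrative name rather than a real bootstrap function:

def plan_stage(
    domain_group: set[str],
    all_domains: set[str],
    already_set_up: set[str],
    after_dependencies: dict[str, set[str]],
) -> set[str]:
    """Model of the per-stage filtering used by the new bootstrap loop."""
    # Keep only domains that are going to be loaded at all.
    stage_domains = (domain_group & all_domains) - already_set_up
    # Widen with dependencies the stage itself does not contain...
    deps = {
        dep
        for domain in stage_domains
        for dep in after_dependencies.get(domain, set())
        if dep not in stage_domains
    }
    # ...minus anything a previous stage already finished.
    return stage_domains | (deps - already_set_up)


assert plan_stage(
    domain_group={"frontend", "recorder"},
    all_domains={"frontend", "recorder", "http", "sensor"},
    already_set_up={"http"},
    after_dependencies={"frontend": {"http"}, "recorder": {"http"}},
) == {"frontend", "recorder"}  # http is dropped: already set up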
@ -1,5 +1,12 @@
{
"domain": "amazon",
"name": "Amazon",
"integrations": ["alexa", "amazon_polly", "aws", "fire_tv", "route53"]
"integrations": [
"alexa",
"amazon_polly",
"aws",
"aws_s3",
"fire_tv",
"route53"
]
}

5
homeassistant/brands/bosch.json
Normal file
@ -0,0 +1,5 @@
{
"domain": "bosch",
"name": "Bosch",
"integrations": ["bosch_alarm", "bosch_shc", "home_connect"]
}

5
homeassistant/brands/eve.json
Normal file
@ -0,0 +1,5 @@
{
"domain": "eve",
"name": "Eve",
"iot_standards": ["matter"]
}

@ -6,6 +6,7 @@
"google_assistant_sdk",
"google_cloud",
"google_drive",
"google_gemini",
"google_generative_ai_conversation",
"google_mail",
"google_maps",

@ -1,5 +1,6 @@
{
"domain": "motionblinds",
"name": "Motionblinds",
"integrations": ["motion_blinds", "motionblinds_ble"]
"integrations": ["motion_blinds", "motionblinds_ble"],
"iot_standards": ["matter"]
}

6
homeassistant/brands/nuki.json
Normal file
@ -0,0 +1,6 @@
{
"domain": "nuki",
"name": "Nuki",
"integrations": ["nuki"],
"iot_standards": ["matter"]
}
@ -24,7 +24,7 @@ from homeassistant.components.weather import (

API_METRIC: Final = "Metric"
ATTRIBUTION: Final = "Data provided by AccuWeather"
ATTR_CATEGORY: Final = "Category"
ATTR_CATEGORY_VALUE = "CategoryValue"
ATTR_DIRECTION: Final = "Direction"
ATTR_ENGLISH: Final = "English"
ATTR_LEVEL: Final = "level"
@ -55,5 +55,19 @@ CONDITION_MAP = {
for cond_ha, cond_codes in CONDITION_CLASSES.items()
for cond_code in cond_codes
}
AIR_QUALITY_CATEGORY_MAP = {
1: "good",
2: "moderate",
3: "unhealthy",
4: "very_unhealthy",
5: "hazardous",
}
POLLEN_CATEGORY_MAP = {
1: "low",
2: "moderate",
3: "high",
4: "very_high",
5: "extreme",
}
UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=40)
UPDATE_INTERVAL_DAILY_FORECAST = timedelta(hours=6)
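The two new maps key AccuWeather's numeric CategoryValue (1 through 5) to stable translation keys, replacing reliance on the API's English Category strings. A quick self-contained check of the lookup behavior:

AIR_QUALITY_CATEGORY_MAP = {
    1: "good",
    2: "moderate",
    3: "unhealthy",
    4: "very_unhealthy",
    5: "hazardous",
}

# A CategoryValue outside 1-5 raises KeyError, which surfaces an API
# change immediately instead of silently reporting a wrong state.
assert AIR_QUALITY_CATEGORY_MAP[4] == "very_unhealthy"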
@ -75,7 +75,11 @@ class AccuWeatherObservationDataUpdateCoordinator(
async with timeout(10):
result = await self.accuweather.async_get_current_conditions()
except EXCEPTIONS as error:
raise UpdateFailed(error) from error
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="current_conditions_update_error",
translation_placeholders={"error": repr(error)},
) from error

_LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining)

@ -117,9 +121,15 @@ class AccuWeatherDailyForecastDataUpdateCoordinator(
"""Update data via library."""
try:
async with timeout(10):
result = await self.accuweather.async_get_daily_forecast()
result = await self.accuweather.async_get_daily_forecast(
language=self.hass.config.language
)
except EXCEPTIONS as error:
raise UpdateFailed(error) from error
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="forecast_update_error",
translation_placeholders={"error": repr(error)},
) from error

_LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining)
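Both hunks above replace UpdateFailed(error) with a translatable error: the message template lives in the integration's strings.json (added later in this diff) and the raise supplies only a key plus placeholders. A reduced model of the substitution, assuming simple {placeholder} formatting and reusing the actual message string from this commit:

# Template as added to strings.json under "exceptions" further below.
template = (
    "An error occurred while retrieving weather forecast data "
    "from the AccuWeather API: {error}"
)
# The raise site passes translation_placeholders={"error": repr(error)}.
placeholders = {"error": repr(ValueError("timeout"))}
print(template.format(**placeholders))
# -> An error occurred while retrieving weather forecast data from the
#    AccuWeather API: ValueError('timeout')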
@ -7,6 +7,6 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["accuweather"],
"requirements": ["accuweather==4.1.0"],
"requirements": ["accuweather==4.2.0"],
"single_config_entry": true
}
@ -29,8 +29,9 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import (
AIR_QUALITY_CATEGORY_MAP,
API_METRIC,
ATTR_CATEGORY,
ATTR_CATEGORY_VALUE,
ATTR_DIRECTION,
ATTR_ENGLISH,
ATTR_LEVEL,
@ -38,6 +39,7 @@ from .const import (
ATTR_VALUE,
ATTRIBUTION,
MAX_FORECAST_DAYS,
POLLEN_CATEGORY_MAP,
)
from .coordinator import (
AccuWeatherConfigEntry,
@ -59,9 +61,9 @@ class AccuWeatherSensorDescription(SensorEntityDescription):
FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
AccuWeatherSensorDescription(
key="AirQuality",
value_fn=lambda data: cast(str, data[ATTR_CATEGORY]),
value_fn=lambda data: AIR_QUALITY_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]],
device_class=SensorDeviceClass.ENUM,
options=["good", "hazardous", "high", "low", "moderate", "unhealthy"],
options=list(AIR_QUALITY_CATEGORY_MAP.values()),
translation_key="air_quality",
),
AccuWeatherSensorDescription(
@ -83,7 +85,9 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
entity_registry_enabled_default=False,
native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
attr_fn=lambda data: {
ATTR_LEVEL: POLLEN_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]]
},
translation_key="grass_pollen",
),
AccuWeatherSensorDescription(
@ -107,7 +111,9 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
entity_registry_enabled_default=False,
native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
attr_fn=lambda data: {
ATTR_LEVEL: POLLEN_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]]
},
translation_key="mold_pollen",
),
AccuWeatherSensorDescription(
@ -115,7 +121,9 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
entity_registry_enabled_default=False,
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
attr_fn=lambda data: {
ATTR_LEVEL: POLLEN_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]]
},
translation_key="ragweed_pollen",
),
AccuWeatherSensorDescription(
@ -181,14 +189,18 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
entity_registry_enabled_default=False,
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
attr_fn=lambda data: {
ATTR_LEVEL: POLLEN_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]]
},
translation_key="tree_pollen",
),
AccuWeatherSensorDescription(
key="UVIndex",
native_unit_of_measurement=UV_INDEX,
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
attr_fn=lambda data: {
ATTR_LEVEL: POLLEN_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]]
},
translation_key="uv_index_forecast",
),
AccuWeatherSensorDescription(
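Every hunk in this file follows the same description-driven pattern: each AccuWeatherSensorDescription bundles a value_fn and an attr_fn, so one generic entity class serves all sensors and the CategoryValue mapping lives in the description. A stripped-down, self-contained model of that shape (Description and the sample data are illustrative):

from collections.abc import Callable
from dataclasses import dataclass
from typing import Any

POLLEN_CATEGORY_MAP = {1: "low", 2: "moderate", 3: "high", 4: "very_high", 5: "extreme"}


@dataclass(frozen=True)
class Description:
    key: str
    value_fn: Callable[[dict[str, Any]], Any]
    attr_fn: Callable[[dict[str, Any]], dict[str, Any]]


grass_pollen = Description(
    key="Grass",
    value_fn=lambda data: data["Value"],
    # CategoryValue (1-5) is mapped to a translation key, mirroring
    # the attr_fn changes above.
    attr_fn=lambda data: {"level": POLLEN_CATEGORY_MAP[data["CategoryValue"]]},
)

data = {"Value": 7, "CategoryValue": 2}
print(grass_pollen.value_fn(data))  # 7
print(grass_pollen.attr_fn(data))   # {'level': 'moderate'}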
@ -26,10 +26,20 @@
"state": {
"good": "Good",
"hazardous": "Hazardous",
"high": "High",
"low": "Low",
"moderate": "Moderate",
"unhealthy": "Unhealthy"
"unhealthy": "Unhealthy",
"very_unhealthy": "Very unhealthy"
},
"state_attributes": {
"options": {
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]",
"very_unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::very_unhealthy%]"
}
}
}
},
"apparent_temperature": {
@ -62,12 +72,11 @@
"level": {
"name": "Level",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
"extreme": "Extreme",
"high": "[%key:common::state::high%]",
"low": "[%key:common::state::low%]",
"moderate": "Moderate",
"very_high": "[%key:common::state::very_high%]"
}
}
}
@ -81,12 +90,11 @@
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
"extreme": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::extreme%]",
"high": "[%key:common::state::high%]",
"low": "[%key:common::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
"very_high": "[%key:common::state::very_high%]"
}
}
}
@ -100,6 +108,15 @@
"steady": "Steady",
"rising": "Rising",
"falling": "Falling"
},
"state_attributes": {
"options": {
"state": {
"falling": "[%key:component::accuweather::entity::sensor::pressure_tendency::state::falling%]",
"rising": "[%key:component::accuweather::entity::sensor::pressure_tendency::state::rising%]",
"steady": "[%key:component::accuweather::entity::sensor::pressure_tendency::state::steady%]"
}
}
}
},
"ragweed_pollen": {
@ -108,12 +125,11 @@
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
"extreme": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::extreme%]",
"high": "[%key:common::state::high%]",
"low": "[%key:common::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
"very_high": "[%key:common::state::very_high%]"
}
}
}
@ -154,12 +170,11 @@
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
"extreme": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::extreme%]",
"high": "[%key:common::state::high%]",
"low": "[%key:common::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
"very_high": "[%key:common::state::very_high%]"
}
}
}
@ -170,12 +185,11 @@
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
"extreme": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::extreme%]",
"high": "[%key:common::state::high%]",
"low": "[%key:common::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
"very_high": "[%key:common::state::very_high%]"
}
}
}
@ -186,12 +200,11 @@
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
"extreme": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::extreme%]",
"high": "[%key:common::state::high%]",
"low": "[%key:common::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
"very_high": "[%key:common::state::very_high%]"
}
}
}
@ -222,6 +235,14 @@
}
}
},
"exceptions": {
"current_conditions_update_error": {
"message": "An error occurred while retrieving weather current conditions data from the AccuWeather API: {error}"
},
"forecast_update_error": {
"message": "An error occurred while retrieving weather forecast data from the AccuWeather API: {error}"
}
},
"system_health": {
"info": {
"can_reach_server": "Reach AccuWeather server",
@ -2,25 +2,38 @@

from __future__ import annotations

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant

from .const import CONNECTION_TYPE, LOCAL
from .coordinator import AdaxCloudCoordinator, AdaxConfigEntry, AdaxLocalCoordinator

PLATFORMS = [Platform.CLIMATE]


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: AdaxConfigEntry) -> bool:
"""Set up Adax from a config entry."""
if entry.data.get(CONNECTION_TYPE) == LOCAL:
local_coordinator = AdaxLocalCoordinator(hass, entry)
entry.runtime_data = local_coordinator
else:
cloud_coordinator = AdaxCloudCoordinator(hass, entry)
entry.runtime_data = cloud_coordinator

await entry.runtime_data.async_config_entry_first_refresh()

await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: AdaxConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)


async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
async def async_migrate_entry(
hass: HomeAssistant, config_entry: AdaxConfigEntry
) -> bool:
"""Migrate old entry."""
# convert title and unique_id to string
if config_entry.version == 1:
@ -12,57 +12,42 @@ from homeassistant.components.climate import (
ClimateEntityFeature,
HVACMode,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ATTR_TEMPERATURE,
CONF_IP_ADDRESS,
CONF_PASSWORD,
CONF_TOKEN,
CONF_UNIQUE_ID,
PRECISION_WHOLE,
UnitOfTemperature,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import ACCOUNT_ID, CONNECTION_TYPE, DOMAIN, LOCAL
from . import AdaxConfigEntry
from .const import CONNECTION_TYPE, DOMAIN, LOCAL
from .coordinator import AdaxCloudCoordinator, AdaxLocalCoordinator


async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
entry: AdaxConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the Adax thermostat with config flow."""
if entry.data.get(CONNECTION_TYPE) == LOCAL:
adax_data_handler = AdaxLocal(
entry.data[CONF_IP_ADDRESS],
entry.data[CONF_TOKEN],
websession=async_get_clientsession(hass, verify_ssl=False),
)
local_coordinator = cast(AdaxLocalCoordinator, entry.runtime_data)
async_add_entities(
[LocalAdaxDevice(adax_data_handler, entry.data[CONF_UNIQUE_ID])], True
[LocalAdaxDevice(local_coordinator, entry.data[CONF_UNIQUE_ID])],
)
else:
cloud_coordinator = cast(AdaxCloudCoordinator, entry.runtime_data)
async_add_entities(
AdaxDevice(cloud_coordinator, device_id)
for device_id in cloud_coordinator.data
)
return

adax_data_handler = Adax(
entry.data[ACCOUNT_ID],
entry.data[CONF_PASSWORD],
websession=async_get_clientsession(hass),
)

async_add_entities(
(
AdaxDevice(room, adax_data_handler)
for room in await adax_data_handler.get_rooms()
),
True,
)


class AdaxDevice(ClimateEntity):
class AdaxDevice(CoordinatorEntity[AdaxCloudCoordinator], ClimateEntity):
"""Representation of a heater."""

_attr_hvac_modes = [HVACMode.HEAT, HVACMode.OFF]
@ -76,20 +61,37 @@ class AdaxDevice(ClimateEntity):
_attr_target_temperature_step = PRECISION_WHOLE
_attr_temperature_unit = UnitOfTemperature.CELSIUS

def __init__(self, heater_data: dict[str, Any], adax_data_handler: Adax) -> None:
def __init__(
self,
coordinator: AdaxCloudCoordinator,
device_id: str,
) -> None:
"""Initialize the heater."""
self._device_id = heater_data["id"]
self._adax_data_handler = adax_data_handler
super().__init__(coordinator)
self._adax_data_handler: Adax = coordinator.adax_data_handler
self._device_id = device_id

self._attr_unique_id = f"{heater_data['homeId']}_{heater_data['id']}"
self._attr_name = self.room["name"]
self._attr_unique_id = f"{self.room['homeId']}_{self._device_id}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, heater_data["id"])},
identifiers={(DOMAIN, device_id)},
# Instead of setting the device name to the entity name, adax
# should be updated to set has_entity_name = True, and set the entity
# name to None
name=cast(str | None, self.name),
manufacturer="Adax",
)
self._apply_data(self.room)

@property
def available(self) -> bool:
"""Whether the entity is available or not."""
return super().available and self._device_id in self.coordinator.data

@property
def room(self) -> dict[str, Any]:
"""Gets the data for this particular device."""
return self.coordinator.data[self._device_id]

async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set hvac mode."""
@ -104,7 +106,9 @@ class AdaxDevice(ClimateEntity):
)
else:
return
await self._adax_data_handler.update()

# Request data refresh from source to verify that update was successful
await self.coordinator.async_request_refresh()

async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set new target temperature."""
@ -114,28 +118,31 @@ class AdaxDevice(ClimateEntity):
self._device_id, temperature, True
)

async def async_update(self) -> None:
"""Get the latest data."""
for room in await self._adax_data_handler.get_rooms():
if room["id"] != self._device_id:
continue
self._attr_name = room["name"]
self._attr_current_temperature = room.get("temperature")
self._attr_target_temperature = room.get("targetTemperature")
if room["heatingEnabled"]:
self._attr_hvac_mode = HVACMode.HEAT
self._attr_icon = "mdi:radiator"
else:
self._attr_hvac_mode = HVACMode.OFF
self._attr_icon = "mdi:radiator-off"
return
@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
if room := self.room:
self._apply_data(room)
super()._handle_coordinator_update()

def _apply_data(self, room: dict[str, Any]) -> None:
"""Update the appropriate attributes based on received data."""
self._attr_current_temperature = room.get("temperature")
self._attr_target_temperature = room.get("targetTemperature")
if room["heatingEnabled"]:
self._attr_hvac_mode = HVACMode.HEAT
self._attr_icon = "mdi:radiator"
else:
self._attr_hvac_mode = HVACMode.OFF
self._attr_icon = "mdi:radiator-off"


class LocalAdaxDevice(ClimateEntity):
class LocalAdaxDevice(CoordinatorEntity[AdaxLocalCoordinator], ClimateEntity):
"""Representation of a heater."""

_attr_hvac_modes = [HVACMode.HEAT, HVACMode.OFF]
_attr_hvac_mode = HVACMode.HEAT
_attr_hvac_mode = HVACMode.OFF
_attr_icon = "mdi:radiator-off"
_attr_max_temp = 35
_attr_min_temp = 5
_attr_supported_features = (
@ -146,9 +153,10 @@ class LocalAdaxDevice(ClimateEntity):
_attr_target_temperature_step = PRECISION_WHOLE
_attr_temperature_unit = UnitOfTemperature.CELSIUS

def __init__(self, adax_data_handler: AdaxLocal, unique_id: str) -> None:
def __init__(self, coordinator: AdaxLocalCoordinator, unique_id: str) -> None:
"""Initialize the heater."""
self._adax_data_handler = adax_data_handler
super().__init__(coordinator)
self._adax_data_handler: AdaxLocal = coordinator.adax_data_handler
self._attr_unique_id = unique_id
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, unique_id)},
@ -169,17 +177,20 @@ class LocalAdaxDevice(ClimateEntity):
return
await self._adax_data_handler.set_target_temperature(temperature)

async def async_update(self) -> None:
"""Get the latest data."""
data = await self._adax_data_handler.get_status()
self._attr_current_temperature = data["current_temperature"]
self._attr_available = self._attr_current_temperature is not None
if (target_temp := data["target_temperature"]) == 0:
self._attr_hvac_mode = HVACMode.OFF
self._attr_icon = "mdi:radiator-off"
if target_temp == 0:
self._attr_target_temperature = self._attr_min_temp
else:
self._attr_hvac_mode = HVACMode.HEAT
self._attr_icon = "mdi:radiator"
self._attr_target_temperature = target_temp
@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
if data := self.coordinator.data:
self._attr_current_temperature = data["current_temperature"]
self._attr_available = self._attr_current_temperature is not None
if (target_temp := data["target_temperature"]) == 0:
self._attr_hvac_mode = HVACMode.OFF
self._attr_icon = "mdi:radiator-off"
if target_temp == 0:
self._attr_target_temperature = self._attr_min_temp
else:
self._attr_hvac_mode = HVACMode.HEAT
self._attr_icon = "mdi:radiator"
self._attr_target_temperature = target_temp

super()._handle_coordinator_update()
@ -1,5 +1,6 @@
|
||||
"""Constants for the Adax integration."""
|
||||
|
||||
import datetime
|
||||
from typing import Final
|
||||
|
||||
ACCOUNT_ID: Final = "account_id"
|
||||
@ -9,3 +10,5 @@ DOMAIN: Final = "adax"
|
||||
LOCAL = "Local"
|
||||
WIFI_SSID = "wifi_ssid"
|
||||
WIFI_PSWD = "wifi_pswd"
|
||||
|
||||
SCAN_INTERVAL = datetime.timedelta(seconds=60)
|
||||
|
71
homeassistant/components/adax/coordinator.py
Normal file
71
homeassistant/components/adax/coordinator.py
Normal file
@ -0,0 +1,71 @@
|
||||
"""DataUpdateCoordinator for the Adax component."""
|
||||
|
||||
import logging
|
||||
from typing import Any, cast
|
||||
|
||||
from adax import Adax
|
||||
from adax_local import Adax as AdaxLocal
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD, CONF_TOKEN
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import ACCOUNT_ID, SCAN_INTERVAL
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
type AdaxConfigEntry = ConfigEntry[AdaxCloudCoordinator | AdaxLocalCoordinator]
|
||||
|
||||
|
||||
class AdaxCloudCoordinator(DataUpdateCoordinator[dict[str, dict[str, Any]]]):
|
||||
"""Coordinator for updating data to and from Adax (cloud)."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant, entry: AdaxConfigEntry) -> None:
|
||||
"""Initialize the Adax coordinator used for Cloud mode."""
|
||||
super().__init__(
|
||||
hass,
|
||||
config_entry=entry,
|
||||
logger=_LOGGER,
|
||||
name="AdaxCloud",
|
||||
update_interval=SCAN_INTERVAL,
|
||||
)
|
||||
|
||||
self.adax_data_handler = Adax(
|
||||
entry.data[ACCOUNT_ID],
|
||||
entry.data[CONF_PASSWORD],
|
||||
websession=async_get_clientsession(hass),
|
||||
)
|
||||
|
||||
async def _async_update_data(self) -> dict[str, dict[str, Any]]:
|
||||
"""Fetch data from the Adax."""
|
||||
rooms = await self.adax_data_handler.get_rooms() or []
|
||||
return {r["id"]: r for r in rooms}
|
||||
|
||||
|
||||
class AdaxLocalCoordinator(DataUpdateCoordinator[dict[str, Any] | None]):
|
||||
"""Coordinator for updating data to and from Adax (local)."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant, entry: AdaxConfigEntry) -> None:
|
||||
"""Initialize the Adax coordinator used for Local mode."""
|
||||
super().__init__(
|
||||
hass,
|
||||
config_entry=entry,
|
||||
logger=_LOGGER,
|
||||
name="AdaxLocal",
|
||||
update_interval=SCAN_INTERVAL,
|
||||
)
|
||||
|
||||
self.adax_data_handler = AdaxLocal(
|
||||
entry.data[CONF_IP_ADDRESS],
|
||||
entry.data[CONF_TOKEN],
|
||||
websession=async_get_clientsession(hass, verify_ssl=False),
|
||||
)
|
||||
|
||||
async def _async_update_data(self) -> dict[str, Any]:
|
||||
"""Fetch data from the Adax."""
|
||||
if result := await self.adax_data_handler.get_status():
|
||||
return cast(dict[str, Any], result)
|
||||
raise UpdateFailed("Got invalid status from device")
|
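Taken together, the climate changes above and this new coordinator module move Adax from per-entity polling (async_update) to a shared coordinator: one object fetches every room on a fixed interval, keys the result by device id, and each entity derives both its state and its availability from that shared dict. A minimal, framework-free sketch of that shape (all names below are invented for illustration; the real classes build on Home Assistant's DataUpdateCoordinator and CoordinatorEntity helpers):

import asyncio
from collections.abc import Callable
from typing import Any


class MiniCoordinator:
    """Poll once for all devices and fan the result out to listeners."""

    def __init__(self, fetch: Callable[[], Any]) -> None:
        self._fetch = fetch  # async callable returning a list of room dicts
        self.data: dict[int, dict[str, Any]] = {}
        self._listeners: list[Callable[[], None]] = []

    def add_listener(self, callback: Callable[[], None]) -> None:
        self._listeners.append(callback)

    async def refresh(self) -> None:
        rooms = await self._fetch() or []
        # Same shape as AdaxCloudCoordinator: one dict keyed by room id.
        self.data = {room["id"]: room for room in rooms}
        for callback in self._listeners:
            callback()


class MiniEntity:
    def __init__(self, coordinator: MiniCoordinator, device_id: int) -> None:
        self.coordinator = coordinator
        self._device_id = device_id
        coordinator.add_listener(self._handle_coordinator_update)

    @property
    def available(self) -> bool:
        # Mirrors the entity diff: present in the last fetch == available.
        return self._device_id in self.coordinator.data

    def _handle_coordinator_update(self) -> None:
        print(self._device_id, "->", self.coordinator.data.get(self._device_id))


async def main() -> None:
    async def fetch() -> list[dict[str, Any]]:
        return [{"id": 1, "name": "Office", "temperature": 20.5}]

    coordinator = MiniCoordinator(fetch)
    MiniEntity(coordinator, 1)
    await coordinator.refresh()


asyncio.run(main())

One fetch now serves any number of entities, which is why the entity classes above could drop their individual polling methods.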
@ -1,7 +1,7 @@
{
  "domain": "adax",
  "name": "Adax",
  "codeowners": ["@danielhiversen"],
  "codeowners": ["@danielhiversen", "@lazytarget"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/adax",
  "iot_class": "local_polling",
@ -5,14 +5,14 @@
        "data": {
          "connection_type": "Select connection type"
        },
        "description": "Select connection type. Local requires heaters with bluetooth"
        "description": "Select connection type. Local requires heaters with Bluetooth"
      },
      "local": {
        "data": {
          "wifi_ssid": "Wi-Fi SSID",
          "wifi_pswd": "Wi-Fi Password"
          "wifi_pswd": "Wi-Fi password"
        },
        "description": "Reset the heater by pressing + and OK until display shows 'Reset'. Then press and hold OK button on the heater until the blue led starts blinking before pressing Submit. Configuring heater might take some minutes."
        "description": "Reset the heater by pressing + and OK until display shows 'Reset'. Then press and hold OK button on the heater until the blue LED starts blinking before pressing Submit. Configuring heater might take some minutes."
      },
      "cloud": {
        "data": {
@ -2,6 +2,7 @@

from __future__ import annotations

from decimal import Decimal
import logging
from typing import Any

@ -14,6 +15,7 @@ from homeassistant.components.climate import (
    FAN_MEDIUM,
    ClimateEntity,
    ClimateEntityFeature,
    HVACAction,
    HVACMode,
)
from homeassistant.const import ATTR_TEMPERATURE, PRECISION_WHOLE, UnitOfTemperature
@ -49,6 +51,14 @@ ADVANTAGE_AIR_MYTEMP_ENABLED = "climateControlModeEnabled"
ADVANTAGE_AIR_HEAT_TARGET = "myAutoHeatTargetTemp"
ADVANTAGE_AIR_COOL_TARGET = "myAutoCoolTargetTemp"
ADVANTAGE_AIR_MYFAN = "autoAA"
ADVANTAGE_AIR_MYAUTO_MODE_SET = "myAutoModeCurrentSetMode"

HVAC_ACTIONS = {
    "cool": HVACAction.COOLING,
    "heat": HVACAction.HEATING,
    "vent": HVACAction.FAN,
    "dry": HVACAction.DRYING,
}

HVAC_MODES = [
    HVACMode.OFF,
@ -175,6 +185,17 @@ class AdvantageAirAC(AdvantageAirAcEntity, ClimateEntity):
            return ADVANTAGE_AIR_HVAC_MODES.get(self._ac["mode"])
        return HVACMode.OFF

    @property
    def hvac_action(self) -> HVACAction | None:
        """Return the current running HVAC action."""
        if self._ac["state"] == ADVANTAGE_AIR_STATE_OFF:
            return HVACAction.OFF
        if self._ac["mode"] == "myauto":
            return HVAC_ACTIONS.get(
                self._ac.get(ADVANTAGE_AIR_MYAUTO_MODE_SET, HVACAction.OFF)
            )
        return HVAC_ACTIONS.get(self._ac["mode"])

    @property
    def fan_mode(self) -> str | None:
        """Return the current fan modes."""
@ -273,6 +294,22 @@ class AdvantageAirZone(AdvantageAirZoneEntity, ClimateEntity):
            return HVACMode.HEAT_COOL
        return HVACMode.OFF

    @property
    def hvac_action(self) -> HVACAction | None:
        """Return the HVAC action, inheriting from master AC if zone is open but idle if air is <= 5%."""
        if self._ac["state"] == ADVANTAGE_AIR_STATE_OFF:
            return HVACAction.OFF
        master_action = HVAC_ACTIONS.get(self._ac["mode"], HVACAction.OFF)
        if self._ac["mode"] == "myauto":
            master_action = HVAC_ACTIONS.get(
                str(self._ac.get(ADVANTAGE_AIR_MYAUTO_MODE_SET)), HVACAction.OFF
            )
        if self._zone["state"] == ADVANTAGE_AIR_STATE_OPEN:
            if self._zone["value"] <= Decimal(5):
                return HVACAction.IDLE
            return master_action
        return HVACAction.OFF

    @property
    def current_temperature(self) -> float | None:
        """Return the current temperature."""
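The zone property above is essentially a small decision table. A framework-free restating of it (stub dicts invented for illustration; the 5% airflow threshold and the myAutoModeCurrentSetMode fallback come from the diff):

HVAC_ACTIONS = {"cool": "cooling", "heat": "heating", "vent": "fan", "dry": "drying"}


def zone_action(ac: dict, zone: dict) -> str:
    # Whole unit off: every zone reports "off".
    if ac["state"] == "off":
        return "off"
    # MyAuto delegates to whatever the unit is currently doing.
    mode = ac.get("myAutoModeCurrentSetMode") if ac["mode"] == "myauto" else ac["mode"]
    master_action = HVAC_ACTIONS.get(str(mode), "off")
    # An open zone inherits the master action, unless airflow is at or below 5%.
    if zone["state"] == "open":
        return "idle" if zone["value"] <= 5 else master_action
    return "off"


print(zone_action({"state": "on", "mode": "cool"}, {"state": "open", "value": 80}))  # cooling
print(zone_action({"state": "on", "mode": "cool"}, {"state": "open", "value": 5}))   # idle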
@ -7,3 +7,4 @@ ADVANTAGE_AIR_STATE_CLOSE = "close"
ADVANTAGE_AIR_STATE_ON = "on"
ADVANTAGE_AIR_STATE_OFF = "off"
ADVANTAGE_AIR_AUTOFAN_ENABLED = "aaAutoFanModeEnabled"
ADVANTAGE_AIR_NIGHT_MODE_ENABLED = "quietNightModeEnabled"
@ -41,7 +41,7 @@ async def async_setup_entry(
                entities.append(
                    AdvantageAirThingCover(instance, thing, CoverDeviceClass.BLIND)
                )
            elif thing["channelDipState"] == 3:  # 3 = "Garage door"
            elif thing["channelDipState"] in [3, 10]:  # 3 & 10 = "Garage door"
                entities.append(
                    AdvantageAirThingCover(instance, thing, CoverDeviceClass.GARAGE)
                )
@ -8,7 +8,7 @@ from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import AdvantageAirDataConfigEntry
from .const import ADVANTAGE_AIR_STATE_ON, DOMAIN as ADVANTAGE_AIR_DOMAIN
from .const import ADVANTAGE_AIR_STATE_ON, DOMAIN
from .entity import AdvantageAirEntity, AdvantageAirThingEntity
from .models import AdvantageAirData

@ -52,8 +52,8 @@ class AdvantageAirLight(AdvantageAirEntity, LightEntity):
        self._id: str = light["id"]
        self._attr_unique_id += f"-{self._id}"
        self._attr_device_info = DeviceInfo(
            identifiers={(ADVANTAGE_AIR_DOMAIN, self._attr_unique_id)},
            via_device=(ADVANTAGE_AIR_DOMAIN, self.coordinator.data["system"]["rid"]),
            identifiers={(DOMAIN, self._attr_unique_id)},
            via_device=(DOMAIN, self.coordinator.data["system"]["rid"]),
            manufacturer="Advantage Air",
            model=light.get("moduleType"),
            name=light["name"],
@ -9,6 +9,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import AdvantageAirDataConfigEntry
from .const import (
    ADVANTAGE_AIR_AUTOFAN_ENABLED,
    ADVANTAGE_AIR_NIGHT_MODE_ENABLED,
    ADVANTAGE_AIR_STATE_OFF,
    ADVANTAGE_AIR_STATE_ON,
)
@ -32,6 +33,8 @@ async def async_setup_entry(
        entities.append(AdvantageAirFreshAir(instance, ac_key))
        if ADVANTAGE_AIR_AUTOFAN_ENABLED in ac_device["info"]:
            entities.append(AdvantageAirMyFan(instance, ac_key))
        if ADVANTAGE_AIR_NIGHT_MODE_ENABLED in ac_device["info"]:
            entities.append(AdvantageAirNightMode(instance, ac_key))
    if things := instance.coordinator.data.get("myThings"):
        entities.extend(
            AdvantageAirRelay(instance, thing)
@ -93,6 +96,32 @@ class AdvantageAirMyFan(AdvantageAirAcEntity, SwitchEntity):
        await self.async_update_ac({ADVANTAGE_AIR_AUTOFAN_ENABLED: False})


class AdvantageAirNightMode(AdvantageAirAcEntity, SwitchEntity):
    """Representation of Advantage 'MySleep$aver' Mode control."""

    _attr_icon = "mdi:weather-night"
    _attr_name = "MySleep$aver"
    _attr_device_class = SwitchDeviceClass.SWITCH

    def __init__(self, instance: AdvantageAirData, ac_key: str) -> None:
        """Initialize an Advantage Air Night Mode control."""
        super().__init__(instance, ac_key)
        self._attr_unique_id += "-nightmode"

    @property
    def is_on(self) -> bool:
        """Return the Night Mode status."""
        return self._ac[ADVANTAGE_AIR_NIGHT_MODE_ENABLED]

    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn Night Mode on."""
        await self.async_update_ac({ADVANTAGE_AIR_NIGHT_MODE_ENABLED: True})

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn Night Mode off."""
        await self.async_update_ac({ADVANTAGE_AIR_NIGHT_MODE_ENABLED: False})


class AdvantageAirRelay(AdvantageAirThingEntity, SwitchEntity):
    """Representation of Advantage Air Thing."""

@ -6,7 +6,7 @@ from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import AdvantageAirDataConfigEntry
from .const import DOMAIN as ADVANTAGE_AIR_DOMAIN
from .const import DOMAIN
from .entity import AdvantageAirEntity
from .models import AdvantageAirData

@ -32,9 +32,7 @@ class AdvantageAirApp(AdvantageAirEntity, UpdateEntity):
        """Initialize the Advantage Air App."""
        super().__init__(instance)
        self._attr_device_info = DeviceInfo(
            identifiers={
                (ADVANTAGE_AIR_DOMAIN, self.coordinator.data["system"]["rid"])
            },
            identifiers={(DOMAIN, self.coordinator.data["system"]["rid"])},
            manufacturer="Advantage Air",
            model=self.coordinator.data["system"]["sysType"],
            name=self.coordinator.data["system"]["name"],
@ -51,7 +51,7 @@
    "issues": {
      "deprecated_yaml_import_issue_cannot_connect": {
        "title": "The {integration_title} YAML configuration import failed",
        "description": "Configuring {integration_title} using YAML is being removed but there was an connection error importing your YAML configuration.\n\nEnsure connection to {integration_title} works and restart Home Assistant to try again or remove the {integration_title} YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually."
        "description": "Configuring {integration_title} using YAML is being removed but there was a connection error importing your YAML configuration.\n\nEnsure connection to {integration_title} works and restart Home Assistant to try again or remove the {integration_title} YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually."
      }
    }
  }
@ -10,7 +10,7 @@ from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import DOMAIN as AGENT_DOMAIN, SERVER_URL
from .const import DOMAIN, SERVER_URL

ATTRIBUTION = "ispyconnect.com"
DEFAULT_BRAND = "Agent DVR by ispyconnect.com"
@ -46,7 +46,7 @@ async def async_setup_entry(

    device_registry.async_get_or_create(
        config_entry_id=config_entry.entry_id,
        identifiers={(AGENT_DOMAIN, agent_client.unique)},
        identifiers={(DOMAIN, agent_client.unique)},
        manufacturer="iSpyConnect",
        name=f"Agent {agent_client.name}",
        model="Agent DVR",
@ -12,7 +12,7 @@ from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import AgentDVRConfigEntry
from .const import DOMAIN as AGENT_DOMAIN
from .const import DOMAIN

CONF_HOME_MODE_NAME = "home"
CONF_AWAY_MODE_NAME = "away"
@ -47,7 +47,7 @@ class AgentBaseStation(AlarmControlPanelEntity):
        self._client = client
        self._attr_unique_id = f"{client.unique}_CP"
        self._attr_device_info = DeviceInfo(
            identifiers={(AGENT_DOMAIN, client.unique)},
            identifiers={(DOMAIN, client.unique)},
            name=f"{client.name} {CONST_ALARM_CONTROL_PANEL_NAME}",
            manufacturer="Agent",
            model=CONST_ALARM_CONTROL_PANEL_NAME,
@ -6,6 +6,7 @@ from typing import Any, Concatenate
from airgradient import AirGradientConnectionError, AirGradientError, get_model_name

from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity

@ -29,6 +30,7 @@ class AirGradientEntity(CoordinatorEntity[AirGradientCoordinator]):
            model_id=measures.model,
            serial_number=coordinator.serial_number,
            sw_version=measures.firmware_version,
            connections={(dr.CONNECTION_NETWORK_MAC, coordinator.serial_number)},
        )


@ -11,7 +11,7 @@
      }
    },
    "discovery_confirm": {
      "description": "Do you want to setup {model}?"
      "description": "Do you want to set up {model}?"
    }
  },
  "abort": {
@ -68,8 +68,8 @@
      "led_bar_mode": {
        "name": "LED bar mode",
        "state": {
          "off": "Off",
          "co2": "Carbon dioxide",
          "off": "[%key:common::state::off%]",
          "co2": "[%key:component::sensor::entity_component::carbon_dioxide::name%]",
          "pm": "Particulate matter"
        }
      },
@ -143,8 +143,8 @@
      "led_bar_mode": {
        "name": "[%key:component::airgradient::entity::select::led_bar_mode::name%]",
        "state": {
          "off": "[%key:component::airgradient::entity::select::led_bar_mode::state::off%]",
          "co2": "[%key:component::airgradient::entity::select::led_bar_mode::state::co2%]",
          "off": "[%key:common::state::off%]",
          "co2": "[%key:component::sensor::entity_component::carbon_dioxide::name%]",
          "pm": "[%key:component::airgradient::entity::select::led_bar_mode::state::pm%]"
        }
      },
@ -105,7 +105,14 @@ class AirlyDataUpdateCoordinator(DataUpdateCoordinator[dict[str, str | float | i
        try:
            await measurements.update()
        except (AirlyError, ClientConnectorError) as error:
            raise UpdateFailed(error) from error
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="update_error",
                translation_placeholders={
                    "entry": self.config_entry.title,
                    "error": repr(error),
                },
            ) from error

        _LOGGER.debug(
            "Requests remaining: %s/%s",
@ -126,7 +133,11 @@ class AirlyDataUpdateCoordinator(DataUpdateCoordinator[dict[str, str | float | i
        standards = measurements.current["standards"]

        if index["description"] == NO_AIRLY_SENSORS:
            raise UpdateFailed("Can't retrieve data: no Airly sensors in this area")
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="no_station",
                translation_placeholders={"entry": self.config_entry.title},
            )
        for value in values:
            data[value["name"]] = value["value"]
        for standard in standards:
@ -36,5 +36,13 @@
        "name": "[%key:component::sensor::entity_component::carbon_monoxide::name%]"
      }
    }
  },
  "exceptions": {
    "update_error": {
      "message": "An error occurred while retrieving data from the Airly API for {entry}: {error}"
    },
    "no_station": {
      "message": "An error occurred while retrieving data from the Airly API for {entry}: no measuring stations in this area"
    }
  }
}
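The Airly change above swaps bare-string UpdateFailed messages for translation keys, so the failure text is resolved from strings.json (the exceptions block just added) and can be localized. A reduced sketch of the same pattern, assuming Home Assistant's update_coordinator helper and a hypothetical _fetch method:

from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

DOMAIN = "airly"


class ExampleCoordinator(DataUpdateCoordinator[dict]):
    async def _async_update_data(self) -> dict:
        try:
            return await self._fetch()  # hypothetical fetch helper
        except OSError as error:
            # Resolved against exceptions.update_error in strings.json;
            # the placeholders fill {entry} and {error} in the message.
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="update_error",
                translation_placeholders={
                    "entry": self.config_entry.title,
                    "error": repr(error),
                },
            ) from error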
@ -8,7 +8,7 @@ from aiohttp import ClientSession
from aiohttp.client_exceptions import ClientConnectorError
from pyairnow import WebServiceAPI
from pyairnow.conv import aqi_to_concentration
from pyairnow.errors import AirNowError
from pyairnow.errors import AirNowError, InvalidJsonError

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
@ -79,7 +79,7 @@ class AirNowDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
                distance=self.distance,
            )

        except (AirNowError, ClientConnectorError) as error:
        except (AirNowError, ClientConnectorError, InvalidJsonError) as error:
            raise UpdateFailed(error) from error

        if not obs:
@ -7,7 +7,7 @@
        "api_key": "[%key:common::config_flow::data::api_key%]",
        "latitude": "[%key:common::config_flow::data::latitude%]",
        "longitude": "[%key:common::config_flow::data::longitude%]",
        "radius": "Station Radius (miles; optional)"
        "radius": "Station radius (miles; optional)"
      }
    }
  },
@ -25,7 +25,7 @@
    "step": {
      "init": {
        "data": {
          "radius": "Station Radius (miles)"
          "radius": "Station radius (miles)"
        }
      }
    }
@ -91,7 +91,7 @@
      "name": "Hydrogen fluoride"
    },
    "health_index": {
      "name": "Health Index"
      "name": "Health index"
    },
    "absolute_humidity": {
      "name": "Absolute humidity"
@ -112,10 +112,10 @@
      "name": "Oxygen"
    },
    "performance_index": {
      "name": "Performance Index"
      "name": "Performance index"
    },
    "hydrogen_phosphide": {
      "name": "Hydrogen Phosphide"
      "name": "Hydrogen phosphide"
    },
    "relative_pressure": {
      "name": "Relative pressure"
@ -127,22 +127,22 @@
      "name": "Refrigerant"
    },
    "silicon_hydride": {
      "name": "Silicon Hydride"
      "name": "Silicon hydride"
    },
    "noise": {
      "name": "Noise"
    },
    "maximum_noise": {
      "name": "Noise (Maximum)"
      "name": "Noise (maximum)"
    },
    "radon": {
      "name": "Radon"
    },
    "industrial_volatile_organic_compounds": {
      "name": "VOCs (Industrial)"
      "name": "VOCs (industrial)"
    },
    "virus_index": {
      "name": "Virus Index"
      "name": "Virus index"
    }
  }
}
@ -3,6 +3,19 @@
  "name": "Airthings",
  "codeowners": ["@danielhiversen", "@LaStrada"],
  "config_flow": true,
  "dhcp": [
    {
      "hostname": "airthings-view"
    },
    {
      "hostname": "airthings-hub",
      "macaddress": "D0141190*"
    },
    {
      "hostname": "airthings-hub",
      "macaddress": "70B3D52A0*"
    }
  ],
  "documentation": "https://www.home-assistant.io/integrations/airthings",
  "iot_class": "cloud_polling",
  "loggers": ["airthings"],
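Each entry in the new dhcp list is a discovery match rule: when a DHCP request is seen whose hostname and MAC address match the globs, the integration is offered to the user. Roughly, the matching behaves like this fnmatch-based sketch (the helper is invented for illustration; the real matcher lives in Home Assistant's dhcp component):

from fnmatch import fnmatch

RULES = [
    {"hostname": "airthings-view"},
    {"hostname": "airthings-hub", "macaddress": "D0141190*"},
    {"hostname": "airthings-hub", "macaddress": "70B3D52A0*"},
]


def matches(hostname: str, mac: str) -> bool:
    # MACs are compared without separators and uppercased, as in the manifest globs.
    mac = mac.replace(":", "").upper()
    return any(
        fnmatch(hostname, rule.get("hostname", "*"))
        and fnmatch(mac, rule.get("macaddress", "*"))
        for rule in RULES
    )


print(matches("airthings-hub", "d0:14:11:90:aa:bb"))  # True
print(matches("airthings-hub", "aa:bb:cc:dd:ee:ff"))  # False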
@ -14,6 +14,7 @@ from homeassistant.const import (
    CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
    CONCENTRATION_PARTS_PER_BILLION,
    CONCENTRATION_PARTS_PER_MILLION,
    LIGHT_LUX,
    PERCENTAGE,
    SIGNAL_STRENGTH_DECIBELS,
    EntityCategory,
@ -78,6 +79,12 @@ SENSORS: dict[str, SensorEntityDescription] = {
        translation_key="light",
        state_class=SensorStateClass.MEASUREMENT,
    ),
    "lux": SensorEntityDescription(
        key="lux",
        device_class=SensorDeviceClass.ILLUMINANCE,
        native_unit_of_measurement=LIGHT_LUX,
        state_class=SensorStateClass.MEASUREMENT,
    ),
    "virusRisk": SensorEntityDescription(
        key="virusRisk",
        translation_key="virus_risk",
@ -102,7 +102,8 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
            device = await self._get_device_data(discovery_info)
        except AirthingsDeviceUpdateError:
            return self.async_abort(reason="cannot_connect")
        except Exception:  # noqa: BLE001
        except Exception:
            _LOGGER.exception("Unknown error occurred")
            return self.async_abort(reason="unknown")

        name = get_name(device)
@ -160,7 +161,8 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
            device = await self._get_device_data(discovery_info)
        except AirthingsDeviceUpdateError:
            return self.async_abort(reason="cannot_connect")
        except Exception:  # noqa: BLE001
        except Exception:
            _LOGGER.exception("Unknown error occurred")
            return self.async_abort(reason="unknown")
        name = get_name(device)
        self._discovered_devices[address] = Discovery(name, discovery_info, device)
@ -32,7 +32,8 @@ class AirTouch5ConfigFlow(ConfigFlow, domain=DOMAIN):
        client = Airtouch5SimpleClient(user_input[CONF_HOST])
        try:
            await client.test_connection()
        except Exception:  # noqa: BLE001
        except Exception:
            _LOGGER.exception("Unexpected exception")
            errors = {"base": "cannot_connect"}
        else:
            await self.async_set_unique_id(user_input[CONF_HOST])
@ -2,7 +2,7 @@
  "config": {
    "step": {
      "geography_by_coords": {
        "title": "Configure a Geography",
        "title": "Configure a geography",
        "description": "Use the AirVisual cloud API to monitor a latitude/longitude.",
        "data": {
          "api_key": "[%key:common::config_flow::data::api_key%]",
@ -16,8 +16,8 @@
        "data": {
          "api_key": "[%key:common::config_flow::data::api_key%]",
          "city": "City",
          "country": "Country",
          "state": "State"
          "state": "State",
          "country": "[%key:common::config_flow::data::country%]"
        }
      },
      "reauth_confirm": {
@ -56,12 +56,12 @@
    "sensor": {
      "pollutant_label": {
        "state": {
          "co": "Carbon Monoxide",
          "n2": "Nitrogen Dioxide",
          "o3": "Ozone",
          "p1": "PM10",
          "p2": "PM2.5",
          "s2": "Sulfur Dioxide"
          "co": "[%key:component::sensor::entity_component::carbon_monoxide::name%]",
          "n2": "[%key:component::sensor::entity_component::nitrogen_dioxide::name%]",
          "o3": "[%key:component::sensor::entity_component::ozone::name%]",
          "p1": "[%key:component::sensor::entity_component::pm10::name%]",
          "p2": "[%key:component::sensor::entity_component::pm25::name%]",
          "s2": "[%key:component::sensor::entity_component::sulphur_dioxide::name%]"
        }
      },
      "pollutant_level": {
@ -11,5 +11,5 @@
  "documentation": "https://www.home-assistant.io/integrations/airzone",
  "iot_class": "local_polling",
  "loggers": ["aioairzone"],
  "requirements": ["aioairzone==0.9.9"]
  "requirements": ["aioairzone==1.0.0"]
}
@ -9,6 +9,8 @@ from aioairzone.const import (
    AZD_HUMIDITY,
    AZD_TEMP,
    AZD_TEMP_UNIT,
    AZD_THERMOSTAT_BATTERY,
    AZD_THERMOSTAT_SIGNAL,
    AZD_WEBSERVER,
    AZD_WIFI_RSSI,
    AZD_ZONES,
@ -73,6 +75,20 @@ ZONE_SENSOR_TYPES: Final[tuple[SensorEntityDescription, ...]] = (
        native_unit_of_measurement=PERCENTAGE,
        state_class=SensorStateClass.MEASUREMENT,
    ),
    SensorEntityDescription(
        device_class=SensorDeviceClass.BATTERY,
        key=AZD_THERMOSTAT_BATTERY,
        native_unit_of_measurement=PERCENTAGE,
        state_class=SensorStateClass.MEASUREMENT,
    ),
    SensorEntityDescription(
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=False,
        key=AZD_THERMOSTAT_SIGNAL,
        native_unit_of_measurement=PERCENTAGE,
        state_class=SensorStateClass.MEASUREMENT,
        translation_key="thermostat_signal",
    ),
)


@ -76,6 +76,9 @@
    "sensor": {
      "rssi": {
        "name": "RSSI"
      },
      "thermostat_signal": {
        "name": "Signal strength"
      }
    }
  }
@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/airzone_cloud",
  "iot_class": "cloud_push",
  "loggers": ["aioairzone_cloud"],
  "requirements": ["aioairzone-cloud==0.6.10"]
  "requirements": ["aioairzone-cloud==0.6.12"]
}
@ -32,9 +32,9 @@
    "air_quality": {
      "name": "Air Quality mode",
      "state": {
        "off": "Off",
        "on": "On",
        "auto": "Auto"
        "off": "[%key:common::state::off%]",
        "on": "[%key:common::state::on%]",
        "auto": "[%key:common::state::auto%]"
      }
    },
    "modes": {
@ -2,7 +2,7 @@
  "config": {
    "step": {
      "user": {
        "title": "Choose AlarmDecoder Protocol",
        "title": "Choose AlarmDecoder protocol",
        "data": {
          "protocol": "Protocol"
        }
@ -12,8 +12,8 @@
        "data": {
          "host": "[%key:common::config_flow::data::host%]",
          "port": "[%key:common::config_flow::data::port%]",
          "device_baudrate": "Device Baud Rate",
          "device_path": "Device Path"
          "device_baudrate": "Device baud rate",
          "device_path": "Device path"
        },
        "data_description": {
          "host": "The hostname or IP address of the AlarmDecoder device that is connected to your alarm panel.",
@ -44,36 +44,36 @@
      "arm_settings": {
        "title": "[%key:component::alarmdecoder::options::step::init::title%]",
        "data": {
          "auto_bypass": "Auto Bypass on Arm",
          "code_arm_required": "Code Required for Arming",
          "alt_night_mode": "Alternative Night Mode"
          "auto_bypass": "Auto-bypass on arm",
          "code_arm_required": "Code required for arming",
          "alt_night_mode": "Alternative night mode"
        }
      },
      "zone_select": {
        "title": "[%key:component::alarmdecoder::options::step::init::title%]",
        "description": "Enter the zone number you'd like to add, edit, or remove.",
        "data": {
          "zone_number": "Zone Number"
          "zone_number": "Zone number"
        }
      },
      "zone_details": {
        "title": "[%key:component::alarmdecoder::options::step::init::title%]",
        "description": "Enter details for zone {zone_number}. To delete zone {zone_number}, leave Zone Name blank.",
        "description": "Enter details for zone {zone_number}. To delete zone {zone_number}, leave 'Zone name' blank.",
        "data": {
          "zone_name": "Zone Name",
          "zone_type": "Zone Type",
          "zone_rfid": "RF Serial",
          "zone_loop": "RF Loop",
          "zone_relayaddr": "Relay Address",
          "zone_relaychan": "Relay Channel"
          "zone_name": "Zone name",
          "zone_type": "Zone type",
          "zone_rfid": "RF serial",
          "zone_loop": "RF loop",
          "zone_relayaddr": "Relay address",
          "zone_relaychan": "Relay channel"
        }
      }
    },
    "error": {
      "relay_inclusive": "Relay Address and Relay Channel are codependent and must be included together.",
      "relay_inclusive": "'Relay address' and 'Relay channel' are codependent and must be included together.",
      "int": "The field below must be an integer.",
      "loop_rfid": "RF Loop cannot be used without RF Serial.",
      "loop_range": "RF Loop must be an integer between 1 and 4."
      "loop_rfid": "'RF loop' cannot be used without 'RF serial'.",
      "loop_range": "'RF loop' must be an integer between 1 and 4."
    }
  },
  "services": {
@ -14,7 +14,7 @@ from homeassistant.components.notify import (
)
from homeassistant.const import STATE_IDLE, STATE_OFF, STATE_ON
from homeassistant.core import Event, EventStateChangedData, HassJob, HomeAssistant
from homeassistant.exceptions import ServiceNotFound
from homeassistant.exceptions import ServiceNotFound, ServiceValidationError
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import (
    async_track_point_in_time,
@ -195,7 +195,8 @@ class AlertEntity(Entity):

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Async Acknowledge alert."""
        LOGGER.debug("Acknowledged Alert: %s", self._attr_name)
        if not self._can_ack:
            raise ServiceValidationError("This alert cannot be acknowledged")
        self._ack = True
        self.async_write_ha_state()
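ServiceValidationError, newly imported above, marks the failure as a user input problem rather than an internal fault, so the service call fails with a friendly message instead of logging a traceback. A stripped-down sketch of the guard (the stub class is invented; only the raise mirrors the diff):

from homeassistant.exceptions import ServiceValidationError


class ExampleAlert:
    """Stub standing in for an AlertEntity-like object."""

    def __init__(self, can_ack: bool) -> None:
        self._can_ack = can_ack
        self._ack = False

    async def async_turn_off(self) -> None:
        # Reject the acknowledge request instead of silently accepting it.
        if not self._can_ack:
            raise ServiceValidationError("This alert cannot be acknowledged")
        self._ack = True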
@ -1438,7 +1438,7 @@ class AlexaModeController(AlexaCapability):
        # Fan preset_mode
        if self.instance == f"{fan.DOMAIN}.{fan.ATTR_PRESET_MODE}":
            mode = self.entity.attributes.get(fan.ATTR_PRESET_MODE, None)
            if mode in self.entity.attributes.get(fan.ATTR_PRESET_MODES, None):
            if mode in self.entity.attributes.get(fan.ATTR_PRESET_MODES, ()):
                return f"{fan.ATTR_PRESET_MODE}.{mode}"

        # Humidifier mode
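That one-character change matters because a membership test against None raises instead of returning False; an empty tuple keeps the lookup safe when the entity exposes no preset list:

modes = None
try:
    "eco" in modes  # membership test against None
except TypeError as err:
    print(err)  # argument of type 'NoneType' is not iterable

print("eco" in ())  # False: an empty tuple is a safe default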
@ -719,7 +719,7 @@ class LockCapabilities(AlexaEntity):
        yield Alexa(self.entity)


@ENTITY_ADAPTERS.register(media_player.const.DOMAIN)
@ENTITY_ADAPTERS.register(media_player.DOMAIN)
class MediaPlayerCapabilities(AlexaEntity):
    """Class to represent MediaPlayer capabilities."""

@ -757,9 +757,7 @@ class MediaPlayerCapabilities(AlexaEntity):

        if supported & media_player.MediaPlayerEntityFeature.SELECT_SOURCE:
            inputs = AlexaInputController.get_valid_inputs(
                self.entity.attributes.get(
                    media_player.const.ATTR_INPUT_SOURCE_LIST, []
                )
                self.entity.attributes.get(media_player.ATTR_INPUT_SOURCE_LIST, [])
            )
            if len(inputs) > 0:
                yield AlexaInputController(self.entity)
@ -776,8 +774,7 @@ class MediaPlayerCapabilities(AlexaEntity):
            and domain != "denonavr"
        ):
            inputs = AlexaEqualizerController.get_valid_inputs(
                self.entity.attributes.get(media_player.const.ATTR_SOUND_MODE_LIST)
                or []
                self.entity.attributes.get(media_player.ATTR_SOUND_MODE_LIST) or []
            )
            if len(inputs) > 0:
                yield AlexaEqualizerController(self.entity)
@ -566,7 +566,7 @@ async def async_api_set_volume(

    data: dict[str, Any] = {
        ATTR_ENTITY_ID: entity.entity_id,
        media_player.const.ATTR_MEDIA_VOLUME_LEVEL: volume,
        media_player.ATTR_MEDIA_VOLUME_LEVEL: volume,
    }

    await hass.services.async_call(
@ -589,7 +589,7 @@ async def async_api_select_input(

    # Attempt to map the ALL UPPERCASE payload name to a source.
    # Strips trailing 1 to match single input devices.
    source_list = entity.attributes.get(media_player.const.ATTR_INPUT_SOURCE_LIST) or []
    source_list = entity.attributes.get(media_player.ATTR_INPUT_SOURCE_LIST) or []
    for source in source_list:
        formatted_source = (
            source.lower().replace("-", "").replace("_", "").replace(" ", "")
@ -611,7 +611,7 @@ async def async_api_select_input(

    data: dict[str, Any] = {
        ATTR_ENTITY_ID: entity.entity_id,
        media_player.const.ATTR_INPUT_SOURCE: media_input,
        media_player.ATTR_INPUT_SOURCE: media_input,
    }

    await hass.services.async_call(
@ -636,7 +636,7 @@ async def async_api_adjust_volume(
    volume_delta = int(directive.payload["volume"])

    entity = directive.entity
    current_level = entity.attributes[media_player.const.ATTR_MEDIA_VOLUME_LEVEL]
    current_level = entity.attributes[media_player.ATTR_MEDIA_VOLUME_LEVEL]

    # read current state
    try:
@ -648,7 +648,7 @@ async def async_api_adjust_volume(

    data: dict[str, Any] = {
        ATTR_ENTITY_ID: entity.entity_id,
        media_player.const.ATTR_MEDIA_VOLUME_LEVEL: volume,
        media_player.ATTR_MEDIA_VOLUME_LEVEL: volume,
    }

    await hass.services.async_call(
@ -709,7 +709,7 @@ async def async_api_set_mute(
    entity = directive.entity
    data: dict[str, Any] = {
        ATTR_ENTITY_ID: entity.entity_id,
        media_player.const.ATTR_MEDIA_VOLUME_MUTED: mute,
        media_player.ATTR_MEDIA_VOLUME_MUTED: mute,
    }

    await hass.services.async_call(
@ -1708,15 +1708,13 @@ async def async_api_changechannel(

    data: dict[str, Any] = {
        ATTR_ENTITY_ID: entity.entity_id,
        media_player.const.ATTR_MEDIA_CONTENT_ID: channel,
        media_player.const.ATTR_MEDIA_CONTENT_TYPE: (
            media_player.const.MEDIA_TYPE_CHANNEL
        ),
        media_player.ATTR_MEDIA_CONTENT_ID: channel,
        media_player.ATTR_MEDIA_CONTENT_TYPE: (media_player.MediaType.CHANNEL),
    }

    await hass.services.async_call(
        entity.domain,
        media_player.const.SERVICE_PLAY_MEDIA,
        media_player.SERVICE_PLAY_MEDIA,
        data,
        blocking=False,
        context=context,
@ -1825,13 +1823,13 @@ async def async_api_set_eq_mode(
    context: ha.Context,
) -> AlexaResponse:
    """Process a SetMode request for EqualizerController."""
    mode = directive.payload["mode"]
    mode: str = directive.payload["mode"]
    entity = directive.entity
    data: dict[str, Any] = {ATTR_ENTITY_ID: entity.entity_id}

    sound_mode_list = entity.attributes.get(media_player.const.ATTR_SOUND_MODE_LIST)
    sound_mode_list = entity.attributes.get(media_player.ATTR_SOUND_MODE_LIST)
    if sound_mode_list and mode.lower() in sound_mode_list:
        data[media_player.const.ATTR_SOUND_MODE] = mode.lower()
        data[media_player.ATTR_SOUND_MODE] = mode.lower()
    else:
        msg = f"failed to map sound mode {mode} to a mode on {entity.entity_id}"
        raise AlexaInvalidValueError(msg)
@ -3,10 +3,10 @@

from __future__ import annotations

from asyncio import timeout
from collections.abc import Mapping
from http import HTTPStatus
import json
import logging
from types import MappingProxyType
from typing import TYPE_CHECKING, Any, cast
from uuid import uuid4

@ -260,10 +260,10 @@ async def async_enable_proactive_mode(
    def extra_significant_check(
        hass: HomeAssistant,
        old_state: str,
        old_attrs: dict[Any, Any] | MappingProxyType[Any, Any],
        old_attrs: Mapping[Any, Any],
        old_extra_arg: Any,
        new_state: str,
        new_attrs: dict[str, Any] | MappingProxyType[Any, Any],
        new_attrs: Mapping[Any, Any],
        new_extra_arg: Any,
    ) -> bool:
        """Check if the serialized data has changed."""
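Annotating with the abstract Mapping, as above, covers both concrete types the old union spelled out, since dict and MappingProxyType are each registered Mapping subtypes:

from collections.abc import Mapping
from types import MappingProxyType


def changed(old_attrs: Mapping[str, object], new_attrs: Mapping[str, object]) -> bool:
    # Accepts any read-only view of key/value pairs.
    return dict(old_attrs) != dict(new_attrs)


plain = {"a": 1}
frozen = MappingProxyType({"a": 2})
print(isinstance(plain, Mapping), isinstance(frozen, Mapping))  # True True
print(changed(plain, frozen))  # True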
@ -6,5 +6,5 @@
  "iot_class": "cloud_push",
  "loggers": ["boto3", "botocore", "s3transfer"],
  "quality_scale": "legacy",
  "requirements": ["boto3==1.34.131"]
  "requirements": ["boto3==1.37.1"]
}
@ -240,6 +240,7 @@ SENSOR_DESCRIPTIONS = (
        suggested_display_precision=0,
        entity_registry_enabled_default=False,
        device_class=SensorDeviceClass.WIND_DIRECTION,
        state_class=SensorStateClass.MEASUREMENT_ANGLE,
    ),
    SensorEntityDescription(
        key=TYPE_WINDGUSTMPH,
@ -609,6 +609,7 @@ SENSOR_DESCRIPTIONS = (
        translation_key="wind_direction",
        native_unit_of_measurement=DEGREE,
        device_class=SensorDeviceClass.WIND_DIRECTION,
        state_class=SensorStateClass.MEASUREMENT_ANGLE,
    ),
    SensorEntityDescription(
        key=TYPE_WINDDIR_AVG10M,
@ -8,7 +8,7 @@ from python_homeassistant_analytics import (
    HomeassistantAnalyticsClient,
    HomeassistantAnalyticsConnectionError,
)
from python_homeassistant_analytics.models import IntegrationType
from python_homeassistant_analytics.models import Environment, IntegrationType
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow
@ -81,7 +81,7 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN):
            )
            try:
                addons = await client.get_addons()
                integrations = await client.get_integrations()
                integrations = await client.get_integrations(Environment.NEXT)
                custom_integrations = await client.get_custom_integrations()
            except HomeassistantAnalyticsConnectionError:
                LOGGER.exception("Error connecting to Home Assistant analytics")
@ -165,7 +165,7 @@ class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlow):
            )
            try:
                addons = await client.get_addons()
                integrations = await client.get_integrations()
                integrations = await client.get_integrations(Environment.NEXT)
                custom_integrations = await client.get_custom_integrations()
            except HomeassistantAnalyticsConnectionError:
                LOGGER.exception("Error connecting to Home Assistant analytics")
@ -3,12 +3,12 @@
    "step": {
      "user": {
        "data": {
          "tracked_addons": "Addons",
          "tracked_addons": "Add-ons",
          "tracked_integrations": "Integrations",
          "tracked_custom_integrations": "Custom integrations"
        },
        "data_description": {
          "tracked_addons": "Select the addons you want to track",
          "tracked_addons": "Select the add-ons you want to track",
          "tracked_integrations": "Select the integrations you want to track",
          "tracked_custom_integrations": "Select the custom integrations you want to track"
        }
@ -5,5 +5,5 @@
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/android_ip_webcam",
  "iot_class": "local_polling",
  "requirements": ["pydroid-ipcam==2.0.0"]
  "requirements": ["pydroid-ipcam==3.0.0"]
}
@ -73,7 +73,7 @@ class AndroidTVRemoteBaseEntity(Entity):
            self._api.send_key_command(key_code, direction)
        except ConnectionClosed as exc:
            raise HomeAssistantError(
                "Connection to Android TV device is closed"
                translation_domain=DOMAIN, translation_key="connection_closed"
            ) from exc

    def _send_launch_app_command(self, app_link: str) -> None:
@ -85,5 +85,5 @@ class AndroidTVRemoteBaseEntity(Entity):
            self._api.send_launch_app_command(app_link)
        except ConnectionClosed as exc:
            raise HomeAssistantError(
                "Connection to Android TV device is closed"
                translation_domain=DOMAIN, translation_key="connection_closed"
            ) from exc
@ -7,6 +7,6 @@
  "integration_type": "device",
  "iot_class": "local_push",
  "loggers": ["androidtvremote2"],
  "requirements": ["androidtvremote2==0.2.0"],
  "requirements": ["androidtvremote2==0.2.1"],
  "zeroconf": ["_androidtvremote2._tcp.local."]
}
@ -21,7 +21,7 @@ from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import AndroidTVRemoteConfigEntry
from .const import CONF_APP_ICON, CONF_APP_NAME
from .const import CONF_APP_ICON, CONF_APP_NAME, DOMAIN
from .entity import AndroidTVRemoteBaseEntity

PARALLEL_UPDATES = 0
@ -233,5 +233,5 @@ class AndroidTVRemoteMediaPlayerEntity(AndroidTVRemoteBaseEntity, MediaPlayerEnt
            await asyncio.sleep(delay_secs)
        except ConnectionClosed as exc:
            raise HomeAssistantError(
                "Connection to Android TV device is closed"
                translation_domain=DOMAIN, translation_key="connection_closed"
            ) from exc
@ -54,5 +54,10 @@
        }
      }
    }
  },
  "exceptions": {
    "connection_closed": {
      "message": "Connection to the Android TV device is closed"
    }
  }
}
@ -2,6 +2,8 @@

from __future__ import annotations

import logging

from anova_wifi import AnovaApi, InvalidLogin
import voluptuous as vol

@ -11,8 +13,10 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

class AnovaConfligFlow(ConfigFlow, domain=DOMAIN):

class AnovaConfigFlow(ConfigFlow, domain=DOMAIN):
    """Sets up a config flow for Anova."""

    VERSION = 1
@ -35,7 +39,8 @@ class AnovaConfligFlow(ConfigFlow, domain=DOMAIN):
            await api.authenticate()
        except InvalidLogin:
            errors["base"] = "invalid_auth"
        except Exception:  # noqa: BLE001
        except Exception:
            _LOGGER.exception("Unexpected exception")
            errors["base"] = "unknown"
        else:
            return self.async_create_entry(
@ -22,6 +22,7 @@ from . import AnthemavConfigEntry
from .const import ANTHEMAV_UPDATE_SIGNAL, DOMAIN, MANUFACTURER

_LOGGER = logging.getLogger(__name__)
VOLUME_STEP = 0.01


async def async_setup_entry(
@ -60,6 +61,7 @@ class AnthemAVR(MediaPlayerEntity):
        | MediaPlayerEntityFeature.TURN_OFF
        | MediaPlayerEntityFeature.SELECT_SOURCE
    )
    _attr_volume_step = VOLUME_STEP

    def __init__(
        self,
@ -12,7 +12,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv

from .const import DOMAIN, LOGGER
from .const import CONF_CHAT_MODEL, DOMAIN, LOGGER, RECOMMENDED_CHAT_MODEL

PLATFORMS = (Platform.CONVERSATION,)
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
@ -26,12 +26,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) ->
        partial(anthropic.AsyncAnthropic, api_key=entry.data[CONF_API_KEY])
    )
    try:
        await client.messages.create(
            model="claude-3-haiku-20240307",
            max_tokens=1,
            messages=[{"role": "user", "content": "Hi"}],
            timeout=10.0,
        )
        model_id = entry.options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
        model = await client.models.retrieve(model_id=model_id, timeout=10.0)
        LOGGER.debug("Anthropic model: %s", model.display_name)
    except anthropic.AuthenticationError as err:
        LOGGER.error("Invalid API key: %s", err)
        return False
@ -2,6 +2,7 @@

from __future__ import annotations

from collections.abc import Mapping
from functools import partial
import logging
from types import MappingProxyType
@ -34,10 +35,12 @@ from .const import (
    CONF_PROMPT,
    CONF_RECOMMENDED,
    CONF_TEMPERATURE,
    CONF_THINKING_BUDGET,
    DOMAIN,
    RECOMMENDED_CHAT_MODEL,
    RECOMMENDED_MAX_TOKENS,
    RECOMMENDED_TEMPERATURE,
    RECOMMENDED_THINKING_BUDGET,
)

_LOGGER = logging.getLogger(__name__)
@ -50,7 +53,7 @@ STEP_USER_DATA_SCHEMA = vol.Schema(

RECOMMENDED_OPTIONS = {
    CONF_RECOMMENDED: True,
    CONF_LLM_HASS_API: llm.LLM_API_ASSIST,
    CONF_LLM_HASS_API: [llm.LLM_API_ASSIST],
    CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT,
}

@ -63,12 +66,7 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
    client = await hass.async_add_executor_job(
        partial(anthropic.AsyncAnthropic, api_key=data[CONF_API_KEY])
    )
    await client.messages.create(
        model="claude-3-haiku-20240307",
        max_tokens=1,
        messages=[{"role": "user", "content": "Hi"}],
        timeout=10.0,
    )
    await client.models.list(timeout=10.0)


class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
@ -133,25 +131,36 @@ class AnthropicOptionsFlow(OptionsFlow):
    ) -> ConfigFlowResult:
        """Manage the options."""
        options: dict[str, Any] | MappingProxyType[str, Any] = self.config_entry.options
        errors: dict[str, str] = {}

        if user_input is not None:
            if user_input[CONF_RECOMMENDED] == self.last_rendered_recommended:
                if user_input[CONF_LLM_HASS_API] == "none":
                    user_input.pop(CONF_LLM_HASS_API)
                return self.async_create_entry(title="", data=user_input)
                if not user_input.get(CONF_LLM_HASS_API):
                    user_input.pop(CONF_LLM_HASS_API, None)
                if user_input.get(
                    CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET
                ) >= user_input.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS):
                    errors[CONF_THINKING_BUDGET] = "thinking_budget_too_large"

            # Re-render the options again, now with the recommended options shown/hidden
            self.last_rendered_recommended = user_input[CONF_RECOMMENDED]
                if not errors:
                    return self.async_create_entry(title="", data=user_input)
            else:
                # Re-render the options again, now with the recommended options shown/hidden
                self.last_rendered_recommended = user_input[CONF_RECOMMENDED]

            options = {
                CONF_RECOMMENDED: user_input[CONF_RECOMMENDED],
                CONF_PROMPT: user_input[CONF_PROMPT],
                CONF_LLM_HASS_API: user_input[CONF_LLM_HASS_API],
            }
                options = {
                    CONF_RECOMMENDED: user_input[CONF_RECOMMENDED],
                    CONF_PROMPT: user_input[CONF_PROMPT],
                    CONF_LLM_HASS_API: user_input.get(CONF_LLM_HASS_API),
                }

        suggested_values = options.copy()
        if not suggested_values.get(CONF_PROMPT):
            suggested_values[CONF_PROMPT] = llm.DEFAULT_INSTRUCTIONS_PROMPT
        if (
            suggested_llm_apis := suggested_values.get(CONF_LLM_HASS_API)
        ) and isinstance(suggested_llm_apis, str):
            suggested_values[CONF_LLM_HASS_API] = [suggested_llm_apis]

        schema = self.add_suggested_values_to_schema(
            vol.Schema(anthropic_config_option_schema(self.hass, options)),
@ -161,33 +170,28 @@ class AnthropicOptionsFlow(OptionsFlow):
        return self.async_show_form(
            step_id="init",
            data_schema=schema,
            errors=errors or None,
        )


def anthropic_config_option_schema(
    hass: HomeAssistant,
    options: dict[str, Any] | MappingProxyType[str, Any],
    options: Mapping[str, Any],
) -> dict:
    """Return a schema for Anthropic completion options."""
    hass_apis: list[SelectOptionDict] = [
        SelectOptionDict(
            label="No control",
            value="none",
        )
    ]
    hass_apis.extend(
        SelectOptionDict(
            label=api.name,
            value=api.id,
        )
        for api in llm.async_get_apis(hass)
    )
    ]

    schema = {
        vol.Optional(CONF_PROMPT): TemplateSelector(),
        vol.Optional(CONF_LLM_HASS_API, default="none"): SelectSelector(
            SelectSelectorConfig(options=hass_apis)
        ),
        vol.Optional(
            CONF_LLM_HASS_API,
        ): SelectSelector(SelectSelectorConfig(options=hass_apis, multiple=True)),
        vol.Required(
            CONF_RECOMMENDED, default=options.get(CONF_RECOMMENDED, False)
        ): bool,
@ -210,6 +214,10 @@ def anthropic_config_option_schema(
            CONF_TEMPERATURE,
            default=RECOMMENDED_TEMPERATURE,
        ): NumberSelector(NumberSelectorConfig(min=0, max=1, step=0.05)),
        vol.Optional(
            CONF_THINKING_BUDGET,
            default=RECOMMENDED_THINKING_BUDGET,
        ): int,
    }
    )
    return schema
@ -13,3 +13,8 @@ CONF_MAX_TOKENS = "max_tokens"
RECOMMENDED_MAX_TOKENS = 1024
CONF_TEMPERATURE = "temperature"
RECOMMENDED_TEMPERATURE = 1.0
CONF_THINKING_BUDGET = "thinking_budget"
RECOMMENDED_THINKING_BUDGET = 0
MIN_THINKING_BUDGET = 1024

THINKING_MODELS = ["claude-3-7-sonnet-20250219", "claude-3-7-sonnet-latest"]
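The new option validation enforces that the extended-thinking budget stays strictly below the response token limit, which is what the thinking_budget_too_large error above reports. A reduced sketch of that check, reusing the defaults from const.py (the standalone function is invented for illustration):

RECOMMENDED_MAX_TOKENS = 1024
RECOMMENDED_THINKING_BUDGET = 0


def validate_options(user_input: dict) -> dict[str, str]:
    """Return form errors, mirroring the options-flow check."""
    errors: dict[str, str] = {}
    thinking = user_input.get("thinking_budget", RECOMMENDED_THINKING_BUDGET)
    max_tokens = user_input.get("max_tokens", RECOMMENDED_MAX_TOKENS)
    # Thinking tokens count against max_tokens, so the budget must be smaller.
    if thinking >= max_tokens:
        errors["thinking_budget"] = "thinking_budget_too_large"
    return errors


print(validate_options({"thinking_budget": 2048, "max_tokens": 1024}))
# {'thinking_budget': 'thinking_budget_too_large'}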
@ -1,27 +1,39 @@
|
||||
"""Conversation support for Anthropic."""
|
||||
|
||||
from collections.abc import AsyncGenerator, Callable
|
||||
from collections.abc import AsyncGenerator, Callable, Iterable
|
||||
import json
|
||||
from typing import Any, Literal
|
||||
from typing import Any, Literal, cast
|
||||
|
||||
import anthropic
|
||||
from anthropic import AsyncStream
|
||||
from anthropic._types import NOT_GIVEN
|
||||
from anthropic.types import (
|
||||
InputJSONDelta,
|
||||
Message,
|
||||
MessageDeltaUsage,
|
||||
MessageParam,
|
||||
MessageStreamEvent,
|
||||
RawContentBlockDeltaEvent,
|
||||
RawContentBlockStartEvent,
|
||||
RawContentBlockStopEvent,
|
||||
RawMessageDeltaEvent,
|
||||
RawMessageStartEvent,
|
||||
RawMessageStopEvent,
|
||||
RedactedThinkingBlock,
|
||||
RedactedThinkingBlockParam,
|
||||
SignatureDelta,
|
||||
TextBlock,
|
||||
TextBlockParam,
|
||||
TextDelta,
|
||||
ThinkingBlock,
|
||||
ThinkingBlockParam,
|
||||
ThinkingConfigDisabledParam,
|
||||
ThinkingConfigEnabledParam,
|
||||
ThinkingDelta,
|
||||
ToolParam,
|
||||
ToolResultBlockParam,
|
||||
ToolUseBlock,
|
||||
ToolUseBlockParam,
|
||||
Usage,
|
||||
)
|
||||
from voluptuous_openapi import convert
|
||||
|
||||
@ -30,7 +42,7 @@ from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import chat_session, device_registry as dr, intent, llm
|
||||
from homeassistant.helpers import device_registry as dr, intent, llm
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import AnthropicConfigEntry
|
||||
@ -39,11 +51,15 @@ from .const import (
|
||||
CONF_MAX_TOKENS,
|
||||
CONF_PROMPT,
|
||||
CONF_TEMPERATURE,
|
||||
CONF_THINKING_BUDGET,
|
||||
DOMAIN,
|
||||
LOGGER,
|
||||
MIN_THINKING_BUDGET,
|
||||
RECOMMENDED_CHAT_MODEL,
|
||||
RECOMMENDED_MAX_TOKENS,
|
||||
RECOMMENDED_TEMPERATURE,
|
||||
RECOMMENDED_THINKING_BUDGET,
|
||||
THINKING_MODELS,
|
||||
)
|
||||
|
||||
# Max number of back and forth with the LLM to generate a response
|
||||
@ -71,73 +87,102 @@ def _format_tool(
|
||||
)
|
||||
|
||||
|
||||
def _message_convert(
|
||||
message: Message,
|
||||
) -> MessageParam:
|
||||
"""Convert from class to TypedDict."""
|
||||
param_content: list[TextBlockParam | ToolUseBlockParam] = []
|
||||
def _convert_content(
|
||||
chat_content: Iterable[conversation.Content],
|
||||
) -> list[MessageParam]:
|
||||
"""Transform HA chat_log content into Anthropic API format."""
|
||||
messages: list[MessageParam] = []
|
||||
|
||||
for message_content in message.content:
|
||||
if isinstance(message_content, TextBlock):
|
||||
param_content.append(TextBlockParam(type="text", text=message_content.text))
|
||||
elif isinstance(message_content, ToolUseBlock):
|
||||
param_content.append(
|
||||
ToolUseBlockParam(
|
||||
type="tool_use",
|
||||
id=message_content.id,
|
||||
name=message_content.name,
|
||||
input=message_content.input,
|
||||
)
|
||||
for content in chat_content:
|
||||
if isinstance(content, conversation.ToolResultContent):
|
||||
tool_result_block = ToolResultBlockParam(
|
||||
type="tool_result",
|
||||
tool_use_id=content.tool_call_id,
|
||||
content=json.dumps(content.tool_result),
|
||||
)
|
||||
|
||||
return MessageParam(role=message.role, content=param_content)
|
||||
|
||||
|
||||
def _convert_content(chat_content: conversation.Content) -> MessageParam:
|
||||
"""Create tool response content."""
|
||||
if isinstance(chat_content, conversation.ToolResultContent):
|
||||
return MessageParam(
|
||||
role="user",
|
||||
content=[
|
||||
ToolResultBlockParam(
|
||||
type="tool_result",
|
||||
tool_use_id=chat_content.tool_call_id,
|
||||
content=json.dumps(chat_content.tool_result),
|
||||
)
|
||||
],
|
||||
)
|
||||
if isinstance(chat_content, conversation.AssistantContent):
|
||||
return MessageParam(
|
||||
role="assistant",
|
||||
content=[
|
||||
TextBlockParam(type="text", text=chat_content.content or ""),
|
||||
*[
|
||||
ToolUseBlockParam(
|
||||
type="tool_use",
|
||||
id=tool_call.id,
|
||||
name=tool_call.tool_name,
|
||||
input=tool_call.tool_args,
|
||||
if not messages or messages[-1]["role"] != "user":
|
||||
messages.append(
|
||||
MessageParam(
|
||||
role="user",
|
||||
content=[tool_result_block],
|
||||
)
|
||||
for tool_call in chat_content.tool_calls or ()
|
||||
],
|
||||
],
|
||||
)
|
||||
if isinstance(chat_content, conversation.UserContent):
|
||||
return MessageParam(
|
||||
role="user",
|
||||
content=chat_content.content,
|
||||
)
|
||||
# Note: We don't pass SystemContent here as its passed to the API as the prompt
|
||||
raise ValueError(f"Unexpected content type: {type(chat_content)}")
|
||||
)
|
||||
            elif isinstance(messages[-1]["content"], str):
                messages[-1]["content"] = [
                    TextBlockParam(type="text", text=messages[-1]["content"]),
                    tool_result_block,
                ]
            else:
                messages[-1]["content"].append(tool_result_block)  # type: ignore[attr-defined]
        elif isinstance(content, conversation.UserContent):
            # Combine consequent user messages
            if not messages or messages[-1]["role"] != "user":
                messages.append(
                    MessageParam(
                        role="user",
                        content=content.content,
                    )
                )
            elif isinstance(messages[-1]["content"], str):
                messages[-1]["content"] = [
                    TextBlockParam(type="text", text=messages[-1]["content"]),
                    TextBlockParam(type="text", text=content.content),
                ]
            else:
                messages[-1]["content"].append(  # type: ignore[attr-defined]
                    TextBlockParam(type="text", text=content.content)
                )
        elif isinstance(content, conversation.AssistantContent):
            # Combine consequent assistant messages
            if not messages or messages[-1]["role"] != "assistant":
                messages.append(
                    MessageParam(
                        role="assistant",
                        content=[],
                    )
                )

            if content.content:
                messages[-1]["content"].append(  # type: ignore[union-attr]
                    TextBlockParam(type="text", text=content.content)
                )
            if content.tool_calls:
                messages[-1]["content"].extend(  # type: ignore[union-attr]
                    [
                        ToolUseBlockParam(
                            type="tool_use",
                            id=tool_call.id,
                            name=tool_call.tool_name,
                            input=tool_call.tool_args,
                        )
                        for tool_call in content.tool_calls
                    ]
                )
        else:
            # Note: We don't pass SystemContent here as it's passed to the API as the prompt
            raise TypeError(f"Unexpected content type: {type(content)}")

    return messages

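The rewritten _convert_content folds consecutive same-role entries into one Anthropic message, since the Messages API expects user and assistant turns to alternate. A reduced sketch of just that merging rule, using plain dicts in place of the SDK's MessageParam/TextBlockParam TypedDicts (merge_turns is illustrative, not part of the integration):

import json  # noqa: F401  (kept for symmetry with the module above)

def merge_turns(chat_content: list[tuple[str, str]]) -> list[dict]:
    """chat_content is (role, text) pairs; consecutive same-role turns merge."""
    messages: list[dict] = []
    for role, text in chat_content:
        if not messages or messages[-1]["role"] != role:
            messages.append({"role": role, "content": [{"type": "text", "text": text}]})
        else:
            # Same role as the previous turn: append another text block
            # instead of starting a new message.
            messages[-1]["content"].append({"type": "text", "text": text})
    return messages

print(merge_turns([("user", "hi"), ("user", "are you there?"), ("assistant", "yes")]))
# Two user text blocks end up in a single user message, followed by one
# assistant message.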
async def _transform_stream(
async def _transform_stream(  # noqa: C901 - This is complex, but better to have it in one place
    chat_log: conversation.ChatLog,
    result: AsyncStream[MessageStreamEvent],
    messages: list[MessageParam],
) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
    """Transform the response stream into HA format.

    A typical stream of responses might look something like the following:
    - RawMessageStartEvent with no content
    - RawContentBlockStartEvent with an empty ThinkingBlock (if extended thinking is enabled)
    - RawContentBlockDeltaEvent with a ThinkingDelta
    - RawContentBlockDeltaEvent with a ThinkingDelta
    - RawContentBlockDeltaEvent with a ThinkingDelta
    - ...
    - RawContentBlockDeltaEvent with a SignatureDelta
    - RawContentBlockStopEvent
    - RawContentBlockStartEvent with a RedactedThinkingBlock (occasionally)
    - RawContentBlockStopEvent (RedactedThinkingBlock does not have a delta)
    - RawContentBlockStartEvent with an empty TextBlock
    - RawContentBlockDeltaEvent with a TextDelta
    - RawContentBlockDeltaEvent with a TextDelta
@ -151,44 +196,127 @@ async def _transform_stream(
    - RawContentBlockStopEvent
    - RawMessageDeltaEvent with a stop_reason='tool_use'
    - RawMessageStopEvent(type='message_stop')

    Each message could contain multiple blocks of the same type.
    """
    if result is None:
        raise TypeError("Expected a stream of messages")

    current_tool_call: dict | None = None
    current_message: MessageParam | None = None
    current_block: (
        TextBlockParam
        | ToolUseBlockParam
        | ThinkingBlockParam
        | RedactedThinkingBlockParam
        | None
    ) = None
    current_tool_args: str
    input_usage: Usage | None = None

    async for response in result:
        LOGGER.debug("Received response: %s", response)

        if isinstance(response, RawContentBlockStartEvent):
        if isinstance(response, RawMessageStartEvent):
            if response.message.role != "assistant":
                raise ValueError("Unexpected message role")
            current_message = MessageParam(role=response.message.role, content=[])
            input_usage = response.message.usage
        elif isinstance(response, RawContentBlockStartEvent):
            if isinstance(response.content_block, ToolUseBlock):
                current_tool_call = {
                    "id": response.content_block.id,
                    "name": response.content_block.name,
                    "input": "",
                }
                current_block = ToolUseBlockParam(
                    type="tool_use",
                    id=response.content_block.id,
                    name=response.content_block.name,
                    input="",
                )
                current_tool_args = ""
            elif isinstance(response.content_block, TextBlock):
                current_block = TextBlockParam(
                    type="text", text=response.content_block.text
                )
                yield {"role": "assistant"}
                if response.content_block.text:
                    yield {"content": response.content_block.text}
            elif isinstance(response.content_block, ThinkingBlock):
                current_block = ThinkingBlockParam(
                    type="thinking",
                    thinking=response.content_block.thinking,
                    signature=response.content_block.signature,
                )
            elif isinstance(response.content_block, RedactedThinkingBlock):
                current_block = RedactedThinkingBlockParam(
                    type="redacted_thinking", data=response.content_block.data
                )
                LOGGER.debug(
                    "Some of Claude’s internal reasoning has been automatically "
                    "encrypted for safety reasons. This doesn’t affect the quality of "
                    "responses"
                )
        elif isinstance(response, RawContentBlockDeltaEvent):
            if current_block is None:
                raise ValueError("Unexpected delta without a block")
            if isinstance(response.delta, InputJSONDelta):
                if current_tool_call is None:
                    raise ValueError("Unexpected delta without a tool call")
                current_tool_call["input"] += response.delta.partial_json
                current_tool_args += response.delta.partial_json
            elif isinstance(response.delta, TextDelta):
                LOGGER.debug("yielding delta: %s", response.delta.text)
                text_block = cast(TextBlockParam, current_block)
                text_block["text"] += response.delta.text
                yield {"content": response.delta.text}
            elif isinstance(response.delta, ThinkingDelta):
                thinking_block = cast(ThinkingBlockParam, current_block)
                thinking_block["thinking"] += response.delta.thinking
            elif isinstance(response.delta, SignatureDelta):
                thinking_block = cast(ThinkingBlockParam, current_block)
                thinking_block["signature"] += response.delta.signature
        elif isinstance(response, RawContentBlockStopEvent):
            if current_tool_call:
            if current_block is None:
                raise ValueError("Unexpected stop event without a current block")
            if current_block["type"] == "tool_use":
                # tool block
                tool_args = json.loads(current_tool_args) if current_tool_args else {}
                current_block["input"] = tool_args
                yield {
                    "tool_calls": [
                        llm.ToolInput(
                            id=current_tool_call["id"],
                            tool_name=current_tool_call["name"],
                            tool_args=json.loads(current_tool_call["input"]),
                            id=current_block["id"],
                            tool_name=current_block["name"],
                            tool_args=tool_args,
                        )
                    ]
                }
                current_tool_call = None
            elif current_block["type"] == "thinking":
                # thinking block
                LOGGER.debug("Thinking: %s", current_block["thinking"])

            if current_message is None:
                raise ValueError("Unexpected stop event without a current message")
            current_message["content"].append(current_block)  # type: ignore[union-attr]
            current_block = None
        elif isinstance(response, RawMessageDeltaEvent):
            if (usage := response.usage) is not None:
                chat_log.async_trace(_create_token_stats(input_usage, usage))
        elif isinstance(response, RawMessageStopEvent):
            if current_message is not None:
                messages.append(current_message)
                current_message = None

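The key subtlety in _transform_stream is that tool arguments arrive as fragments of JSON (InputJSONDelta.partial_json) and can only be parsed once the content block stops. A toy illustration of that accumulate-then-parse step, with simplified event shapes (the dicts below stand in for the SDK event classes):

import json

# Tool arguments stream in as JSON fragments; parse only at block_stop.
events = [
    {"type": "block_start", "tool": "set_temperature"},
    {"type": "delta", "partial_json": '{"temperat'},
    {"type": "delta", "partial_json": 'ure": 21.5}'},
    {"type": "block_stop"},
]

args_buf = ""
for event in events:
    if event["type"] == "delta":
        # Concatenate fragments; the buffer is not valid JSON yet.
        args_buf += event["partial_json"]
    elif event["type"] == "block_stop":
        tool_args = json.loads(args_buf) if args_buf else {}
        print(tool_args)  # {'temperature': 21.5}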
def _create_token_stats(
    input_usage: Usage | None, response_usage: MessageDeltaUsage
) -> dict[str, Any]:
    """Create token stats for conversation agent tracing."""
    input_tokens = 0
    cached_input_tokens = 0
    if input_usage:
        input_tokens = input_usage.input_tokens
        cached_input_tokens = input_usage.cache_creation_input_tokens or 0
    output_tokens = response_usage.output_tokens
    return {
        "stats": {
            "input_tokens": input_tokens,
            "cached_input_tokens": cached_input_tokens,
            "output_tokens": output_tokens,
        }
    }

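For tracing, the helper flattens the two SDK usage objects into a single dict. With, say, 520 input tokens, 100 cache-creation tokens, and 180 output tokens (illustrative numbers, not from the source), the trace payload would be:

{
    "stats": {
        "input_tokens": 520,
        "cached_input_tokens": 100,
        "output_tokens": 180,
    }
}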
class AnthropicConversationEntity(
@ -226,18 +354,6 @@ class AnthropicConversationEntity(
            self.entry.add_update_listener(self._async_entry_update_listener)
        )

    async def async_process(
        self, user_input: conversation.ConversationInput
    ) -> conversation.ConversationResult:
        """Process a sentence."""
        with (
            chat_session.async_get_chat_session(
                self.hass, user_input.conversation_id
            ) as session,
            conversation.async_get_chat_log(self.hass, session, user_input) as chat_log,
        ):
            return await self._async_handle_message(user_input, chat_log)

    async def _async_handle_message(
        self,
        user_input: conversation.ConversationInput,
@ -266,34 +382,51 @@ class AnthropicConversationEntity(
        system = chat_log.content[0]
        if not isinstance(system, conversation.SystemContent):
            raise TypeError("First message must be a system message")
        messages = [_convert_content(content) for content in chat_log.content[1:]]
        messages = _convert_content(chat_log.content[1:])

        client = self.entry.runtime_data

        thinking_budget = options.get(CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET)
        model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)

        # To prevent infinite loops, we limit the number of iterations
        for _iteration in range(MAX_TOOL_ITERATIONS):
            try:
                stream = await client.messages.create(
                    model=options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL),
                    messages=messages,
                    tools=tools or NOT_GIVEN,
                    max_tokens=options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
                    system=system.content,
                    temperature=options.get(CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE),
                    stream=True,
            model_args = {
                "model": model,
                "messages": messages,
                "tools": tools or NOT_GIVEN,
                "max_tokens": options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
                "system": system.content,
                "stream": True,
            }
            if model in THINKING_MODELS and thinking_budget >= MIN_THINKING_BUDGET:
                model_args["thinking"] = ThinkingConfigEnabledParam(
                    type="enabled", budget_tokens=thinking_budget
                )
            else:
                model_args["thinking"] = ThinkingConfigDisabledParam(type="disabled")
                model_args["temperature"] = options.get(
                    CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
                )

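Note that temperature is only forwarded on the disabled branch: Anthropic's Messages API does not accept temperature overrides while extended thinking is active. A minimal sketch of the same branching, with illustrative stand-in values (the model name, budgets, and plain-dict thinking params below are assumptions, not the integration's constants):

# Sketch of the thinking/temperature branching above; values are made up.
model = "claude-3-7-sonnet-latest"
thinking_budget = 2048  # tokens reserved for reasoning
MIN_THINKING_BUDGET = 1024
THINKING_MODELS = ["claude-3-7-sonnet-latest"]

model_args: dict = {"model": model, "max_tokens": 4096, "stream": True}
if model in THINKING_MODELS and thinking_budget >= MIN_THINKING_BUDGET:
    # Extended thinking on: the API requires the default temperature,
    # so no "temperature" key is set on this branch.
    model_args["thinking"] = {"type": "enabled", "budget_tokens": thinking_budget}
else:
    model_args["thinking"] = {"type": "disabled"}
    model_args["temperature"] = 1.0

print(model_args)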
            try:
                stream = await client.messages.create(**model_args)
            except anthropic.AnthropicError as err:
                raise HomeAssistantError(
                    f"Sorry, I had a problem talking to Anthropic: {err}"
                ) from err

            messages.extend(
                [
                    _convert_content(content)
                    async for content in chat_log.async_add_delta_content_stream(
                        user_input.agent_id, _transform_stream(stream)
                    )
                ]
                _convert_content(
                    [
                        content
                        async for content in chat_log.async_add_delta_content_stream(
                            user_input.agent_id,
                            _transform_stream(chat_log, stream, messages),
                        )
                        if not isinstance(content, conversation.AssistantContent)
                    ]
                )
            )

            if not chat_log.unresponded_tool_results:
@ -305,7 +438,9 @@ class AnthropicConversationEntity(
        intent_response = intent.IntentResponse(language=user_input.language)
        intent_response.async_set_speech(response_content.content or "")
        return conversation.ConversationResult(
            response=intent_response, conversation_id=chat_log.conversation_id
            response=intent_response,
            conversation_id=chat_log.conversation_id,
            continue_conversation=chat_log.continue_conversation,
        )

    async def _async_entry_update_listener(
@ -23,12 +23,17 @@
        "max_tokens": "Maximum tokens to return in response",
        "temperature": "Temperature",
        "llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]",
        "recommended": "Recommended model settings"
        "recommended": "Recommended model settings",
        "thinking_budget_tokens": "Thinking budget"
      },
      "data_description": {
        "prompt": "Instruct how the LLM should respond. This can be a template."
        "prompt": "Instruct how the LLM should respond. This can be a template.",
        "thinking_budget_tokens": "The number of tokens the model can use to think about the response out of the total maximum number of tokens. Set to 1024 or greater to enable extended thinking."
      }
    }
  },
  "error": {
    "thinking_budget_too_large": "Maximum tokens must be greater than the thinking budget."
  }
}
@ -53,10 +53,8 @@ class OnlineStatus(CoordinatorEntity[APCUPSdCoordinator], BinarySensorEntity):
        """Initialize the APCUPSd binary device."""
        super().__init__(coordinator, context=description.key.upper())

        # Set up unique id and device info if serial number is available.
        if (serial_no := coordinator.data.serial_no) is not None:
            self._attr_unique_id = f"{serial_no}_{description.key}"
        self.entity_description = description
        self._attr_unique_id = f"{coordinator.unique_device_id}_{description.key}"
        self._attr_device_info = coordinator.device_info

    @property
@ -85,11 +85,16 @@ class APCUPSdCoordinator(DataUpdateCoordinator[APCUPSdData]):
        self._host = host
        self._port = port

    @property
    def unique_device_id(self) -> str:
        """Return a unique ID of the device, which is the serial number (if available) or the config entry ID."""
        return self.data.serial_no or self.config_entry.entry_id

    @property
    def device_info(self) -> DeviceInfo:
        """Return the DeviceInfo of this APC UPS, if serial number is available."""
        return DeviceInfo(
            identifiers={(DOMAIN, self.data.serial_no or self.config_entry.entry_id)},
            identifiers={(DOMAIN, self.unique_device_id)},
            model=self.data.model,
            manufacturer="APC",
            name=self.data.name or "APC UPS",
@ -108,4 +113,7 @@ class APCUPSdCoordinator(DataUpdateCoordinator[APCUPSdData]):
            data = await aioapcaccess.request_status(self._host, self._port)
            return APCUPSdData(data)
        except (OSError, asyncio.IncompleteReadError) as error:
            raise UpdateFailed(error) from error
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="cannot_connect",
            ) from error
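The coordinator now centralizes the serial-number-or-entry-ID fallback that both entity platforms previously duplicated. A self-contained sketch of the pattern (CoordinatorStub and its fields are illustrative stand-ins, not the integration's classes):

# Prefer the hardware serial; fall back to the config entry ID so
# entities keep a stable unique_id even when no serial is reported.
class CoordinatorStub:
    def __init__(self, serial_no: str | None, entry_id: str) -> None:
        self.serial_no = serial_no
        self.entry_id = entry_id

    @property
    def unique_device_id(self) -> str:
        return self.serial_no or self.entry_id

print(CoordinatorStub("XYZ123", "abcd").unique_device_id)  # XYZ123
print(CoordinatorStub(None, "abcd").unique_device_id)      # abcd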
@ -458,11 +458,8 @@ class APCUPSdSensor(CoordinatorEntity[APCUPSdCoordinator], SensorEntity):
        """Initialize the sensor."""
        super().__init__(coordinator=coordinator, context=description.key.upper())

        # Set up unique id and device info if serial number is available.
        if (serial_no := coordinator.data.serial_no) is not None:
            self._attr_unique_id = f"{serial_no}_{description.key}"

        self.entity_description = description
        self._attr_unique_id = f"{coordinator.unique_device_id}_{description.key}"
        self._attr_device_info = coordinator.device_info

        # Initial update of attributes.
@ -57,7 +57,7 @@
      "name": "Status date"
    },
    "dip_switch_settings": {
      "name": "Dip switch settings"
      "name": "DIP switch settings"
    },
    "low_battery_signal": {
      "name": "Low battery signal"
@ -93,7 +93,7 @@
      "name": "Internal temperature"
    },
    "last_self_test": {
      "name": "Last self test"
      "name": "Last self-test"
    },
    "last_transfer": {
      "name": "Last transfer"
@ -177,7 +177,7 @@
      "name": "Restore requirement"
    },
    "self_test_result": {
      "name": "Self test result"
      "name": "Self-test result"
    },
    "sensitivity": {
      "name": "Sensitivity"
@ -195,7 +195,7 @@
      "name": "Status"
    },
    "self_test_interval": {
      "name": "Self test interval"
      "name": "Self-test interval"
    },
    "time_left": {
      "name": "Time left"
@ -219,5 +219,10 @@
      "name": "Transfer to battery"
    }
  }
  },
  "exceptions": {
    "cannot_connect": {
      "message": "Cannot connect to APC UPS Daemon."
    }
  }
}
1
homeassistant/components/apollo_automation/__init__.py
Normal file
@ -0,0 +1 @@
"""Virtual integration: Apollo Automation."""
6
homeassistant/components/apollo_automation/manifest.json
Normal file
@ -0,0 +1,6 @@
{
  "domain": "apollo_automation",
  "name": "Apollo Automation",
  "integration_type": "virtual",
  "supported_by": "esphome"
}
@ -20,6 +20,7 @@ import voluptuous as vol
from homeassistant.components import zeroconf
from homeassistant.config_entries import (
    SOURCE_IGNORE,
    SOURCE_REAUTH,
    SOURCE_ZEROCONF,
    ConfigEntry,
    ConfigFlow,
@ -381,7 +382,9 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
                CONF_IDENTIFIERS: list(combined_identifiers),
            },
        )
        if entry.source != SOURCE_IGNORE:
        # Don't reload ignored entries or in the middle of reauth,
        # e.g. if the user is entering a new PIN
        if entry.source != SOURCE_IGNORE and self.source != SOURCE_REAUTH:
            self.hass.config_entries.async_schedule_reload(entry.entry_id)
        if not allow_exist:
            raise DeviceAlreadyConfigured
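The extra `self.source != SOURCE_REAUTH` clause keeps the entry from reloading underneath an in-progress reauth flow. A compact sketch of the guard with plain-string sources (should_reload is illustrative):

SOURCE_IGNORE, SOURCE_REAUTH = "ignore", "reauth"

def should_reload(entry_source: str, flow_source: str) -> bool:
    # Reload only when the entry is not ignored AND we are not mid-reauth.
    return entry_source != SOURCE_IGNORE and flow_source != SOURCE_REAUTH

assert should_reload("zeroconf", "user") is True
assert should_reload("ignore", "user") is False     # ignored entry
assert should_reload("zeroconf", "reauth") is False  # user may be entering a PIN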
@ -120,6 +120,7 @@ class AppleTvMediaPlayer(
        """Initialize the Apple TV media player."""
        super().__init__(name, identifier, manager)
        self._playing: Playing | None = None
        self._playing_last_updated: datetime | None = None
        self._app_list: dict[str, str] = {}

    @callback
@ -209,6 +210,7 @@ class AppleTvMediaPlayer(
        This is a callback function from pyatv.interface.PushListener.
        """
        self._playing = playstatus
        self._playing_last_updated = dt_util.utcnow()
        self.async_write_ha_state()

    @callback
@ -316,7 +318,7 @@ class AppleTvMediaPlayer(
    def media_position_updated_at(self) -> datetime | None:
        """Last valid time of media position."""
        if self.state in {MediaPlayerState.PLAYING, MediaPlayerState.PAUSED}:
            return dt_util.utcnow()
            return self._playing_last_updated
        return None

    async def async_play_media(
@ -7,5 +7,5 @@
  "integration_type": "device",
  "iot_class": "local_push",
  "loggers": ["pyaprilaire"],
  "requirements": ["pyaprilaire==0.7.7"]
  "requirements": ["pyaprilaire==0.8.1"]
}
@ -43,6 +43,7 @@ class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]):

    config_entry: ApSystemsConfigEntry
    device_version: str
    battery_system: bool

    def __init__(
        self,
@ -68,6 +69,7 @@ class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]):
        self.api.max_power = device_info.maxPower
        self.api.min_power = device_info.minPower
        self.device_version = device_info.devVer
        self.battery_system = device_info.isBatterySystem

    async def _async_update_data(self) -> ApSystemsSensorData:
        try:
@ -6,5 +6,6 @@
  "documentation": "https://www.home-assistant.io/integrations/apsystems",
  "integration_type": "device",
  "iot_class": "local_polling",
  "requirements": ["apsystems-ez1==2.4.0"]
  "loggers": ["APsystemsEZ1"],
  "requirements": ["apsystems-ez1==2.6.0"]
}
@ -21,7 +21,7 @@
  "entity": {
    "binary_sensor": {
      "off_grid_status": {
        "name": "Off grid status"
        "name": "Off-grid status"
      },
      "dc_1_short_circuit_error_status": {
        "name": "DC 1 short circuit error status"
@ -36,6 +36,8 @@ class ApSystemsInverterSwitch(ApSystemsEntity, SwitchEntity):
        super().__init__(data)
        self._api = data.coordinator.api
        self._attr_unique_id = f"{data.device_id}_inverter_status"
        if data.coordinator.battery_system:
            self._attr_available = False

    async def async_update(self) -> None:
        """Update switch status and availability."""
@ -60,7 +60,7 @@ class AquaCellConfigFlow(ConfigFlow, domain=DOMAIN):
            errors["base"] = "cannot_connect"
        except AuthenticationFailed:
            errors["base"] = "invalid_auth"
        except Exception:  # pylint: disable=broad-except
        except Exception:
            _LOGGER.exception("Unexpected exception")
            errors["base"] = "unknown"
        else:
@ -36,9 +36,9 @@
    "wi_fi_strength": {
      "name": "Wi-Fi strength",
      "state": {
        "low": "Low",
        "medium": "Medium",
        "high": "High"
        "low": "[%key:common::state::low%]",
        "medium": "[%key:common::state::medium%]",
        "high": "[%key:common::state::high%]"
      }
    }
  }
@ -26,7 +26,7 @@
    "sensor": {
      "threshold": {
        "state": {
          "error": "Error",
          "error": "[%key:common::state::error%]",
          "green": "Green",
          "yellow": "Yellow",
          "red": "Red"
@ -6,7 +6,11 @@ import logging
from typing import Any

from homeassistant.components import mqtt
from homeassistant.components.sensor import SensorDeviceClass, SensorEntity
from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntity,
    SensorStateClass,
)
from homeassistant.const import DEGREE, UnitOfPrecipitationDepth, UnitOfTemperature
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback
@ -98,6 +102,7 @@ def discover_sensors(topic: str, payload: dict[str, Any]) -> list[ArwnSensor] |
            DEGREE,
            "mdi:compass",
            device_class=SensorDeviceClass.WIND_DIRECTION,
            state_class=SensorStateClass.MEASUREMENT_ANGLE,
        ),
    ]
    return None
@ -178,6 +183,7 @@ class ArwnSensor(SensorEntity):
        units: str,
        icon: str | None = None,
        device_class: SensorDeviceClass | None = None,
        state_class: SensorStateClass | None = None,
    ) -> None:
        """Initialize the sensor."""
        self.entity_id = _slug(name)
@ -188,6 +194,7 @@ class ArwnSensor(SensorEntity):
        self._attr_native_unit_of_measurement = units
        self._attr_icon = icon
        self._attr_device_class = device_class
        self._attr_state_class = state_class

    def set_event(self, event: dict[str, Any]) -> None:
        """Update the sensor with the most recent event."""
@ -117,7 +117,7 @@ async def async_pipeline_from_audio_stream(
    """
    with chat_session.async_get_chat_session(hass, conversation_id) as session:
        pipeline_input = PipelineInput(
            conversation_id=session.conversation_id,
            session=session,
            device_id=device_id,
            stt_metadata=stt_metadata,
            stt_stream=stt_stream,
@ -19,14 +19,7 @@ import wave
import hass_nabucasa
import voluptuous as vol

from homeassistant.components import (
    conversation,
    media_source,
    stt,
    tts,
    wake_word,
    websocket_api,
)
from homeassistant.components import conversation, stt, tts, wake_word, websocket_api
from homeassistant.components.tts import (
    generate_media_source_id as tts_generate_media_source_id,
)
@ -96,6 +89,9 @@ ENGINE_LANGUAGE_PAIRS = (
)

KEY_ASSIST_PIPELINE: HassKey[PipelineData] = HassKey(DOMAIN)
KEY_PIPELINE_CONVERSATION_DATA: HassKey[dict[str, PipelineConversationData]] = HassKey(
    "pipeline_conversation_data"
)


def validate_language(data: dict[str, Any]) -> Any:
@ -129,7 +125,7 @@ SAVE_DELAY = 10
@callback
def _async_local_fallback_intent_filter(result: RecognizeResult) -> bool:
    """Filter out intents that are not local fallback."""
    return result.intent.name in (intent.INTENT_GET_STATE, intent.INTENT_NEVERMIND)
    return result.intent.name in (intent.INTENT_GET_STATE,)


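Note the single-element membership test above: without a trailing comma, `(intent.INTENT_GET_STATE)` is just a parenthesized string, so `in` would perform a substring check instead of tuple membership; the comma makes it a one-element tuple. A quick illustration (assuming the intent name literal "HassGetState"):

# Why the trailing comma matters for single-element tuples.
name = "GetState"
print(name in ("HassGetState"))   # True: substring check against a plain str
print(name in ("HassGetState",))  # False: tuple membership, as intended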
@callback
@ -566,8 +562,7 @@ class PipelineRun:

    id: str = field(default_factory=ulid_util.ulid_now)
    stt_provider: stt.SpeechToTextEntity | stt.Provider = field(init=False, repr=False)
    tts_engine: str = field(init=False, repr=False)
    tts_options: dict | None = field(init=False, default=None)
    tts_stream: tts.ResultStream | None = field(init=False, default=None)
    wake_word_entity_id: str | None = field(init=False, default=None, repr=False)
    wake_word_entity: wake_word.WakeWordDetectionEntity = field(init=False, repr=False)

@ -590,6 +585,12 @@ class PipelineRun:
    _device_id: str | None = None
    """Optional device id set during run start."""

    _conversation_data: PipelineConversationData | None = None
    """Data tied to the conversation ID."""

    _intent_agent_only = False
    """If request should only be handled by agent, ignoring sentence triggers and local processing."""

    def __post_init__(self) -> None:
        """Set language for pipeline."""
        self.language = self.pipeline.language or self.hass.config.language
@ -639,13 +640,19 @@ class PipelineRun:
        self._device_id = device_id
        self._start_debug_recording_thread()

        data = {
        data: dict[str, Any] = {
            "pipeline": self.pipeline.id,
            "language": self.language,
            "conversation_id": conversation_id,
        }
        if self.runner_data is not None:
            data["runner_data"] = self.runner_data
        if self.tts_stream:
            data["tts_output"] = {
                "token": self.tts_stream.token,
                "url": self.tts_stream.url,
                "mime_type": self.tts_stream.content_type,
            }

        self.process_event(PipelineEvent(PipelineEventType.RUN_START, data))

@ -1007,19 +1014,36 @@ class PipelineRun:

            yield chunk.audio

    async def prepare_recognize_intent(self) -> None:
    async def prepare_recognize_intent(self, session: chat_session.ChatSession) -> None:
        """Prepare recognizing an intent."""
        agent_info = conversation.async_get_agent_info(
            self.hass,
            self.pipeline.conversation_engine or conversation.HOME_ASSISTANT_AGENT,
        self._conversation_data = async_get_pipeline_conversation_data(
            self.hass, session
        )

        if agent_info is None:
            engine = self.pipeline.conversation_engine or "default"
            raise IntentRecognitionError(
                code="intent-not-supported",
                message=f"Intent recognition engine {engine} is not found",
        if self._conversation_data.continue_conversation_agent is not None:
            agent_info = conversation.async_get_agent_info(
                self.hass, self._conversation_data.continue_conversation_agent
            )
            self._conversation_data.continue_conversation_agent = None
            if agent_info is None:
                raise IntentRecognitionError(
                    code="intent-agent-not-found",
                    message=f"Intent recognition engine {self._conversation_data.continue_conversation_agent} asked for follow-up but is no longer found",
                )
            self._intent_agent_only = True

        else:
            agent_info = conversation.async_get_agent_info(
                self.hass,
                self.pipeline.conversation_engine or conversation.HOME_ASSISTANT_AGENT,
            )

            if agent_info is None:
                engine = self.pipeline.conversation_engine or "default"
                raise IntentRecognitionError(
                    code="intent-not-supported",
                    message=f"Intent recognition engine {engine} is not found",
                )

        self.intent_agent = agent_info.id

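A condensed sketch of the new agent-selection order: if the previous agent asked to continue the conversation, it is tried first and sentence triggers plus local-first intents are skipped; otherwise the pipeline's configured engine is used. Names below are simplified stand-ins, not the real Home Assistant API:

from dataclasses import dataclass


@dataclass
class ConvData:
    continue_conversation_agent: str | None = None


def pick_agent(conv_data: ConvData, pipeline_engine: str | None, get_agent):
    """Return (agent, intent_agent_only) following the order above."""
    if conv_data.continue_conversation_agent is not None:
        agent_id = conv_data.continue_conversation_agent
        conv_data.continue_conversation_agent = None
        if (agent := get_agent(agent_id)) is None:
            raise RuntimeError(f"{agent_id} asked for follow-up but is gone")
        return agent, True  # skip sentence triggers and local-first intents
    if (agent := get_agent(pipeline_engine or "homeassistant")) is None:
        raise RuntimeError("configured engine not found")
    return agent, False


agents = {"conversation.claude": "claude", "homeassistant": "local"}
print(pick_agent(ConvData("conversation.claude"), None, agents.get))  # ('claude', True)
print(pick_agent(ConvData(), None, agents.get))                       # ('local', False)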
@ -1031,7 +1055,7 @@ class PipelineRun:
        conversation_extra_system_prompt: str | None,
    ) -> str:
        """Run intent recognition portion of pipeline. Returns text to speak."""
        if self.intent_agent is None:
        if self.intent_agent is None or self._conversation_data is None:
            raise RuntimeError("Recognize intent was not prepared")

        if self.pipeline.conversation_language == MATCH_ALL:
@ -1078,7 +1102,7 @@ class PipelineRun:
        agent_id = self.intent_agent
        processed_locally = agent_id == conversation.HOME_ASSISTANT_AGENT
        intent_response: intent.IntentResponse | None = None
        if not processed_locally:
        if not processed_locally and not self._intent_agent_only:
            # Sentence triggers override conversation agent
            if (
                trigger_response_text
@ -1103,12 +1127,16 @@ class PipelineRun:
            ) & conversation.ConversationEntityFeature.CONTROL:
                intent_filter = _async_local_fallback_intent_filter

        # Try local intents first, if preferred.
        elif self.pipeline.prefer_local_intents and (
            intent_response := await conversation.async_handle_intents(
                self.hass,
                user_input,
                intent_filter=intent_filter,
        # Try local intents
        if (
            intent_response is None
            and self.pipeline.prefer_local_intents
            and (
                intent_response := await conversation.async_handle_intents(
                    self.hass,
                    user_input,
                    intent_filter=intent_filter,
                )
            )
        ):
            # Local intent matched
@ -1191,6 +1219,9 @@ class PipelineRun:
            )
        )

        if conversation_result.continue_conversation:
            self._conversation_data.continue_conversation_agent = agent_id

        return speech

    async def prepare_text_to_speech(self) -> None:
@ -1213,36 +1244,31 @@ class PipelineRun:
            tts_options[tts.ATTR_PREFERRED_SAMPLE_BYTES] = SAMPLE_WIDTH

        try:
            options_supported = await tts.async_support_options(
                self.hass,
                engine,
                self.pipeline.tts_language,
                tts_options,
            self.tts_stream = tts.async_create_stream(
                hass=self.hass,
                engine=engine,
                language=self.pipeline.tts_language,
                options=tts_options,
            )
        except HomeAssistantError as err:
            raise TextToSpeechError(
                code="tts-not-supported",
                message=f"Text-to-speech engine '{engine}' not found",
            ) from err
        if not options_supported:
            raise TextToSpeechError(
                code="tts-not-supported",
                message=(
                    f"Text-to-speech engine {engine} "
                    f"does not support language {self.pipeline.tts_language} or options {tts_options}"
                    f"does not support language {self.pipeline.tts_language} or options {tts_options}:"
                    f" {err}"
                ),
            )

        self.tts_engine = engine
        self.tts_options = tts_options
            ) from err

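The refactor replaces the stored engine/options pair with a single result stream created up front: the stream's token and URL can be announced at RUN_START, before any text exists, and the message is attached later in the run. A self-contained sketch of that "create stream first, attach text later" pattern (ResultStreamStub is an illustrative stand-in for tts.ResultStream, not the real class):

import secrets
from dataclasses import dataclass, field


@dataclass
class ResultStreamStub:
    engine: str
    language: str
    content_type: str = "audio/mpeg"
    token: str = field(default_factory=lambda: secrets.token_urlsafe(8))
    message: str | None = None

    @property
    def url(self) -> str:
        # Hypothetical proxy path for illustration only.
        return f"/api/tts_proxy/{self.token}"

    def async_set_message(self, message: str) -> None:
        self.message = message


stream = ResultStreamStub(engine="tts.cloud", language="en-US")
# The client can start fetching as soon as it knows the token/URL...
print({"token": stream.token, "url": stream.url, "mime_type": stream.content_type})
# ...while the text to synthesize arrives later in the pipeline run.
stream.async_set_message("Turning on the living room lights")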
    async def text_to_speech(self, tts_input: str) -> None:
        """Run text-to-speech portion of pipeline."""
        assert self.tts_stream is not None

        self.process_event(
            PipelineEvent(
                PipelineEventType.TTS_START,
                {
                    "engine": self.tts_engine,
                    "engine": self.tts_stream.engine,
                    "language": self.pipeline.tts_language,
                    "voice": self.pipeline.tts_voice,
                    "tts_input": tts_input,
@ -1255,14 +1281,9 @@ class PipelineRun:
            tts_media_id = tts_generate_media_source_id(
                self.hass,
                tts_input,
                engine=self.tts_engine,
                language=self.pipeline.tts_language,
                options=self.tts_options,
            )
            tts_media = await media_source.async_resolve_media(
                self.hass,
                tts_media_id,
                None,
                engine=self.tts_stream.engine,
                language=self.tts_stream.language,
                options=self.tts_stream.options,
            )
        except Exception as src_error:
            _LOGGER.exception("Unexpected error during text-to-speech")
@ -1271,10 +1292,13 @@ class PipelineRun:
                message="Unexpected error during text-to-speech",
            ) from src_error

        _LOGGER.debug("TTS result %s", tts_media)
        self.tts_stream.async_set_message(tts_input)

        tts_output = {
            "media_id": tts_media_id,
            **asdict(tts_media),
            "token": self.tts_stream.token,
            "url": self.tts_stream.url,
            "mime_type": self.tts_stream.content_type,
        }

        self.process_event(
@ -1454,8 +1478,8 @@ class PipelineInput:

    run: PipelineRun

    conversation_id: str
    """Identifier for the conversation."""
    session: chat_session.ChatSession
    """Session for the conversation."""

    stt_metadata: stt.SpeechMetadata | None = None
    """Metadata of stt input audio. Required when start_stage = stt."""
@ -1480,7 +1504,9 @@ class PipelineInput:

    async def execute(self) -> None:
        """Run pipeline."""
        self.run.start(conversation_id=self.conversation_id, device_id=self.device_id)
        self.run.start(
            conversation_id=self.session.conversation_id, device_id=self.device_id
        )
        current_stage: PipelineStage | None = self.run.start_stage
        stt_audio_buffer: list[EnhancedAudioChunk] = []
        stt_processed_stream: AsyncIterable[EnhancedAudioChunk] | None = None
@ -1564,7 +1590,7 @@ class PipelineInput:
        assert intent_input is not None
        tts_input = await self.run.recognize_intent(
            intent_input,
            self.conversation_id,
            self.session.conversation_id,
            self.device_id,
            self.conversation_extra_system_prompt,
        )
@ -1648,7 +1674,7 @@ class PipelineInput:
            <= PIPELINE_STAGE_ORDER.index(PipelineStage.INTENT)
            <= end_stage_index
        ):
            prepare_tasks.append(self.run.prepare_recognize_intent())
            prepare_tasks.append(self.run.prepare_recognize_intent(self.session))

        if (
            start_stage_index
@ -1927,7 +1953,7 @@ class PipelineRunDebug:


class PipelineStore(Store[SerializedPipelineStorageCollection]):
    """Store entity registry data."""
    """Store pipeline data."""

    async def _async_migrate_func(
        self,
@ -2009,3 +2035,37 @@ async def async_run_migrations(hass: HomeAssistant) -> None:

    for pipeline, attr_updates in updates:
        await async_update_pipeline(hass, pipeline, **attr_updates)


@dataclass
class PipelineConversationData:
    """Hold data for the duration of a conversation."""

    continue_conversation_agent: str | None = None
    """The agent that requested the conversation to be continued."""


@callback
def async_get_pipeline_conversation_data(
    hass: HomeAssistant, session: chat_session.ChatSession
) -> PipelineConversationData:
    """Get the pipeline data for a specific conversation."""
    all_conversation_data = hass.data.get(KEY_PIPELINE_CONVERSATION_DATA)
    if all_conversation_data is None:
        all_conversation_data = {}
        hass.data[KEY_PIPELINE_CONVERSATION_DATA] = all_conversation_data

    data = all_conversation_data.get(session.conversation_id)

    if data is not None:
        return data

    @callback
    def do_cleanup() -> None:
        """Handle cleanup."""
        all_conversation_data.pop(session.conversation_id)

    session.async_on_cleanup(do_cleanup)

    data = all_conversation_data[session.conversation_id] = PipelineConversationData()
    return data
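The helper implements a common session-scoped cache: create the entry on first access, register a cleanup hook tied to the chat session's lifetime, and drop the entry when the session expires. A self-contained sketch of the pattern (Session is a minimal stand-in for chat_session.ChatSession):

from dataclasses import dataclass, field
from typing import Callable


@dataclass
class Session:
    conversation_id: str
    _cleanups: list[Callable[[], None]] = field(default_factory=list)

    def async_on_cleanup(self, cb: Callable[[], None]) -> None:
        self._cleanups.append(cb)

    def expire(self) -> None:
        for cb in self._cleanups:
            cb()


store: dict[str, dict] = {}


def get_session_data(session: Session) -> dict:
    if (data := store.get(session.conversation_id)) is not None:
        return data
    # First access: create the entry and tie its lifetime to the session.
    session.async_on_cleanup(lambda: store.pop(session.conversation_id))
    data = store[session.conversation_id] = {}
    return data


s = Session("abc")
get_session_data(s)["continue_agent"] = "conversation.claude"
s.expire()
assert "abc" not in store  # entry removed together with the session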