Mirror of https://github.com/home-assistant/core.git (synced 2025-04-26 02:07:54 +00:00)

commit e8b2a3de8b
2025.4.0 (#141505)
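In short: the release commit bumps the pinned GitHub Actions across the CI workflows (actions/setup-python v5.4.0 -> v5.5.0, actions/upload-artifact v4.6.1 -> v4.6.2, actions/download-artifact v4.1.9 -> v4.2.1, actions/cache v4.2.1 -> v4.2.3, docker/login-action v3.3.0 -> v3.4.0, home-assistant/builder 2025.02.0 -> 2025.03.0, home-assistant/wheels 2024.11.0 -> 2025.03.0, among others), moves CI to the 2025.4 cycle (HA_SHORT_VERSION, CACHE_VERSION), consolidates the lint-only guards into a single lint_only job output, adds a dependency-review job, and starts emitting junit.xml test results that a new upload-test-results job sends to Codecov.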
.github/workflows/builder.yml (vendored, 36 lines changed)
@@ -32,7 +32,7 @@ jobs:
           fetch-depth: 0

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}

@@ -69,7 +69,7 @@ jobs:
         run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -

       - name: Upload translations
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: translations
           path: translations.tar.gz
@@ -94,7 +94,7 @@ jobs:

       - name: Download nightly wheels of frontend
         if: needs.init.outputs.channel == 'dev'
-        uses: dawidd6/action-download-artifact@v8
+        uses: dawidd6/action-download-artifact@v9
         with:
           github_token: ${{secrets.GITHUB_TOKEN}}
           repo: home-assistant/frontend
@@ -105,7 +105,7 @@ jobs:

       - name: Download nightly wheels of intents
         if: needs.init.outputs.channel == 'dev'
-        uses: dawidd6/action-download-artifact@v8
+        uses: dawidd6/action-download-artifact@v9
         with:
           github_token: ${{secrets.GITHUB_TOKEN}}
           repo: home-assistant/intents-package
@@ -116,7 +116,7 @@ jobs:

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         if: needs.init.outputs.channel == 'dev'
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}

@@ -175,7 +175,7 @@ jobs:
           sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt

       - name: Download translations
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.2.1
         with:
           name: translations

@@ -190,14 +190,14 @@ jobs:
           echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE

       - name: Login to GitHub Container Registry
-        uses: docker/login-action@v3.3.0
+        uses: docker/login-action@v3.4.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}

       - name: Build base image
-        uses: home-assistant/builder@2025.02.0
+        uses: home-assistant/builder@2025.03.0
         with:
           args: |
             $BUILD_ARGS \
@@ -256,14 +256,14 @@ jobs:
           fi

       - name: Login to GitHub Container Registry
-        uses: docker/login-action@v3.3.0
+        uses: docker/login-action@v3.4.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}

       - name: Build base image
-        uses: home-assistant/builder@2025.02.0
+        uses: home-assistant/builder@2025.03.0
         with:
           args: |
             $BUILD_ARGS \
@@ -330,14 +330,14 @@ jobs:

       - name: Login to DockerHub
         if: matrix.registry == 'docker.io/homeassistant'
-        uses: docker/login-action@v3.3.0
+        uses: docker/login-action@v3.4.0
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

       - name: Login to GitHub Container Registry
         if: matrix.registry == 'ghcr.io/home-assistant'
-        uses: docker/login-action@v3.3.0
+        uses: docker/login-action@v3.4.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
@@ -457,12 +457,12 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}

       - name: Download translations
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.2.1
         with:
           name: translations

@@ -502,14 +502,14 @@ jobs:
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

       - name: Login to GitHub Container Registry
-        uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
+        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}

       - name: Build Docker image
-        uses: docker/build-push-action@0adf9959216b96bec444f325f1e493d4aa344497 # v6.14.0
+        uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
         with:
           context: . # So action will not pull the repository again
           file: ./script/hassfest/docker/Dockerfile
@@ -522,7 +522,7 @@ jobs:
       - name: Push Docker image
         if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
         id: push
-        uses: docker/build-push-action@0adf9959216b96bec444f325f1e493d4aa344497 # v6.14.0
+        uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
         with:
           context: . # So action will not pull the repository again
           file: ./script/hassfest/docker/Dockerfile
@@ -531,7 +531,7 @@ jobs:

       - name: Generate artifact attestation
         if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
-        uses: actions/attest-build-provenance@520d128f165991a6c774bcb264f323e3d70747f4 # v2.2.0
+        uses: actions/attest-build-provenance@c074443f1aee8d4aeeae555aebba3282517141b2 # v2.2.3
         with:
           subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
           subject-digest: ${{ steps.push.outputs.digest }}
.github/workflows/ci.yaml (vendored, 231 lines changed)
@@ -37,10 +37,10 @@ on:
         type: boolean

 env:
-  CACHE_VERSION: 11
+  CACHE_VERSION: 12
   UV_CACHE_VERSION: 1
   MYPY_CACHE_VERSION: 9
-  HA_SHORT_VERSION: "2025.3"
+  HA_SHORT_VERSION: "2025.4"
   DEFAULT_PYTHON: "3.13"
   ALL_PYTHON_VERSIONS: "['3.13']"
   # 10.3 is the oldest supported version
@@ -89,6 +89,7 @@ jobs:
       test_groups: ${{ steps.info.outputs.test_groups }}
       tests_glob: ${{ steps.info.outputs.tests_glob }}
       tests: ${{ steps.info.outputs.tests }}
+      lint_only: ${{ steps.info.outputs.lint_only }}
       skip_coverage: ${{ steps.info.outputs.skip_coverage }}
     runs-on: ubuntu-24.04
     steps:
@@ -142,6 +143,7 @@ jobs:
           test_group_count=10
           tests="[]"
           tests_glob=""
+          lint_only=""
           skip_coverage=""

           if [[ "${{ steps.integrations.outputs.changes }}" != "[]" ]];
@@ -192,6 +194,17 @@ jobs:
             test_full_suite="true"
           fi

+          if [[ "${{ github.event.inputs.lint-only }}" == "true" ]] \
+            || [[ "${{ github.event.inputs.pylint-only }}" == "true" ]] \
+            || [[ "${{ github.event.inputs.mypy-only }}" == "true" ]] \
+            || [[ "${{ github.event.inputs.audit-licenses-only }}" == "true" ]] \
+            || [[ "${{ github.event_name }}" == "push" \
+              && "${{ github.event.repository.full_name }}" != "home-assistant/core" ]];
+          then
+            lint_only="true"
+            skip_coverage="true"
+          fi
+
           if [[ "${{ github.event.inputs.skip-coverage }}" == "true" ]] \
             || [[ "${{ contains(github.event.pull_request.labels.*.name, 'ci-skip-coverage') }}" == "true" ]];
           then
@@ -217,6 +230,8 @@ jobs:
           echo "tests=${tests}" >> $GITHUB_OUTPUT
           echo "tests_glob: ${tests_glob}"
           echo "tests_glob=${tests_glob}" >> $GITHUB_OUTPUT
+          echo "lint_only": ${lint_only}
+          echo "lint_only=${lint_only}" >> $GITHUB_OUTPUT
           echo "skip_coverage: ${skip_coverage}"
           echo "skip_coverage=${skip_coverage}" >> $GITHUB_OUTPUT

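The new lint_only flag is computed once in the info job and replaces the five separate guards each test job used to repeat in its if: expression (see the simplified conditions further down). A minimal standalone sketch of the gate, with the workflow's ${{ github.* }} expressions faked as shell variables; the variable names are illustrative only, and the pylint/mypy/audit-licenses inputs (which behave like lint-only) are omitted:

    #!/usr/bin/env bash
    event_name="push"            # stands in for ${{ github.event_name }}
    repo="someuser/core"         # stands in for ${{ github.event.repository.full_name }}
    lint_only_input="false"      # stands in for ${{ github.event.inputs.lint-only }}

    lint_only=""
    skip_coverage=""
    # Lint-only mode: an explicit *-only dispatch input, or a push to a fork.
    if [[ "$lint_only_input" == "true" ]] \
      || [[ "$event_name" == "push" && "$repo" != "home-assistant/core" ]]; then
      lint_only="true"   # test jobs later check: needs.info.outputs.lint_only != 'true'
      skip_coverage="true"
    fi
    echo "lint_only=${lint_only} skip_coverage=${skip_coverage}"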
@@ -234,13 +249,13 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.1
+        uses: actions/cache@v4.2.3
         with:
           path: venv
           key: >-
@@ -256,7 +271,7 @@ jobs:
           uv pip install "$(cat requirements_test.txt | grep pre-commit)"
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v4.2.1
+        uses: actions/cache@v4.2.3
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           lookup-only: true
@@ -279,14 +294,14 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
@@ -295,7 +310,7 @@ jobs:
           needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true
@@ -319,14 +334,14 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
@@ -335,7 +350,7 @@ jobs:
           needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true
@@ -359,14 +374,14 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
        id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
@@ -375,7 +390,7 @@ jobs:
           needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true
@@ -469,7 +484,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
@@ -482,7 +497,7 @@ jobs:
           env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.1
+        uses: actions/cache@v4.2.3
         with:
           path: venv
           key: >-
@@ -490,7 +505,7 @@ jobs:
           needs.info.outputs.python_cache_key }}
       - name: Restore uv wheel cache
         if: steps.cache-venv.outputs.cache-hit != 'true'
-        uses: actions/cache@v4.2.1
+        uses: actions/cache@v4.2.3
         with:
           path: ${{ env.UV_CACHE_DIR }}
           key: >-
@@ -537,7 +552,7 @@ jobs:
           python --version
           uv pip freeze >> pip_freeze.txt
       - name: Upload pip_freeze artifact
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: pip-freeze-${{ matrix.python-version }}
           path: pip_freeze.txt
@@ -572,13 +587,13 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
@@ -605,13 +620,13 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
@@ -623,6 +638,25 @@ jobs:
           . venv/bin/activate
           python -m script.gen_requirements_all validate

+  dependency-review:
+    name: Dependency review
+    runs-on: ubuntu-24.04
+    needs:
+      - info
+      - base
+    if: |
+      github.event.inputs.pylint-only != 'true'
+      && github.event.inputs.mypy-only != 'true'
+      && needs.info.outputs.requirements == 'true'
+      && github.event_name == 'pull_request'
+    steps:
+      - name: Check out code from GitHub
+        uses: actions/checkout@v4.2.2
+      - name: Dependency review
+        uses: actions/dependency-review-action@v4.5.0
+        with:
+          license-check: false # We use our own license audit checks
+
   audit-licenses:
     name: Audit licenses
     runs-on: ubuntu-24.04
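The dependency-review job above runs only on pull requests that touch requirements (per its if: block); actions/dependency-review-action flags known-vulnerable packages introduced by the PR's manifest changes, and its built-in license check is disabled because, as the inline comment notes, the repository's own audit-licenses job already covers licensing.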
@@ -643,13 +677,13 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
@@ -661,7 +695,7 @@ jobs:
           . venv/bin/activate
           python -m script.licenses extract --output-file=licenses-${{ matrix.python-version }}.json
       - name: Upload licenses
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: licenses-${{ github.run_number }}-${{ matrix.python-version }}
           path: licenses-${{ matrix.python-version }}.json
@@ -686,13 +720,13 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
@@ -733,13 +767,13 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
@@ -778,7 +812,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
@@ -791,7 +825,7 @@ jobs:
           env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
@@ -799,7 +833,7 @@ jobs:
           ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
           needs.info.outputs.python_cache_key }}
       - name: Restore mypy cache
-        uses: actions/cache@v4.2.1
+        uses: actions/cache@v4.2.3
         with:
           path: .mypy_cache
           key: >-
@@ -829,11 +863,7 @@ jobs:
   prepare-pytest-full:
     runs-on: ubuntu-24.04
     if: |
-      (github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
-      && github.event.inputs.lint-only != 'true'
-      && github.event.inputs.pylint-only != 'true'
-      && github.event.inputs.mypy-only != 'true'
-      && github.event.inputs.audit-licenses-only != 'true'
+      needs.info.outputs.lint_only != 'true'
       && needs.info.outputs.test_full_suite == 'true'
     needs:
       - info
@@ -859,13 +889,13 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
@@ -877,7 +907,7 @@ jobs:
           . venv/bin/activate
           python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests
       - name: Upload pytest_buckets
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: pytest_buckets
           path: pytest_buckets.txt
@@ -886,11 +916,7 @@ jobs:
   pytest-full:
     runs-on: ubuntu-24.04
     if: |
-      (github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
-      && github.event.inputs.lint-only != 'true'
-      && github.event.inputs.pylint-only != 'true'
-      && github.event.inputs.mypy-only != 'true'
-      && github.event.inputs.audit-licenses-only != 'true'
+      needs.info.outputs.lint_only != 'true'
       && needs.info.outputs.test_full_suite == 'true'
     needs:
       - info
@@ -923,13 +949,13 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
@@ -942,7 +968,7 @@ jobs:
         run: |
           echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
       - name: Download pytest_buckets
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.2.1
         with:
           name: pytest_buckets
       - name: Compile English translations
@@ -962,6 +988,7 @@ jobs:
           if [[ "${{ needs.info.outputs.skip_coverage }}" != "true" ]]; then
             cov_params+=(--cov="homeassistant")
             cov_params+=(--cov-report=xml)
+            cov_params+=(--junitxml=junit.xml -o junit_family=legacy)
           fi

           echo "Test group ${{ matrix.group }}: $(sed -n "${{ matrix.group }},1p" pytest_buckets.txt)"
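The new --junitxml flag (repeated below for the recorder and partial test jobs) makes each coverage run also write a JUnit-style junit.xml. A hedged sketch of the resulting invocation; the test target here is illustrative, since the real job reads its bucket from pytest_buckets.txt and adds further flags:

    # Assumes pytest and pytest-cov are installed (e.g. via requirements_test.txt).
    cov_params=(--cov="homeassistant" --cov-report=xml)
    cov_params+=(--junitxml=junit.xml -o junit_family=legacy)  # new in this commit

    python3 -b -X dev -m pytest "${cov_params[@]}" tests/components/demo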
@@ -980,18 +1007,24 @@ jobs:
           2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-full.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
           path: pytest-*.txt
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
           path: coverage.xml
           overwrite: true
+      - name: Upload test results artifact
+        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
+        uses: actions/upload-artifact@v4.6.2
+        with:
+          name: test-results-full-${{ matrix.python-version }}-${{ matrix.group }}
+          path: junit.xml
       - name: Remove pytest_buckets
         run: rm pytest_buckets.txt
       - name: Check dirty
@@ -1009,11 +1042,7 @@ jobs:
         MYSQL_ROOT_PASSWORD: password
       options: --health-cmd="mysqladmin ping -uroot -ppassword" --health-interval=5s --health-timeout=2s --health-retries=3
     if: |
-      (github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
-      && github.event.inputs.lint-only != 'true'
-      && github.event.inputs.pylint-only != 'true'
-      && github.event.inputs.mypy-only != 'true'
-      && github.event.inputs.audit-licenses-only != 'true'
+      needs.info.outputs.lint_only != 'true'
       && needs.info.outputs.mariadb_groups != '[]'
     needs:
       - info
@@ -1045,13 +1074,13 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
@@ -1088,6 +1117,7 @@ jobs:
             cov_params+=(--cov="homeassistant.components.recorder")
             cov_params+=(--cov-report=xml)
             cov_params+=(--cov-report=term-missing)
+            cov_params+=(--junitxml=junit.xml -o junit_family=legacy)
           fi

           python3 -b -X dev -m pytest \
@@ -1108,7 +1138,7 @@ jobs:
           2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.mariadb }}
@@ -1116,12 +1146,19 @@ jobs:
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: coverage-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.mariadb }}
           path: coverage.xml
           overwrite: true
+      - name: Upload test results artifact
+        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
+        uses: actions/upload-artifact@v4.6.2
+        with:
+          name: test-results-mariadb-${{ matrix.python-version }}-${{
+            steps.pytest-partial.outputs.mariadb }}
+          path: junit.xml
       - name: Check dirty
         run: |
           ./script/check_dirty
@@ -1137,11 +1174,7 @@ jobs:
         POSTGRES_PASSWORD: password
       options: --health-cmd="pg_isready -hlocalhost -Upostgres" --health-interval=5s --health-timeout=2s --health-retries=3
     if: |
-      (github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
-      && github.event.inputs.lint-only != 'true'
-      && github.event.inputs.pylint-only != 'true'
-      && github.event.inputs.mypy-only != 'true'
-      && github.event.inputs.audit-licenses-only != 'true'
+      needs.info.outputs.lint_only != 'true'
       && needs.info.outputs.postgresql_groups != '[]'
     needs:
       - info
@@ -1175,13 +1208,13 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
@@ -1218,6 +1251,7 @@ jobs:
             cov_params+=(--cov="homeassistant.components.recorder")
             cov_params+=(--cov-report=xml)
             cov_params+=(--cov-report=term-missing)
+            cov_params+=(--junitxml=junit.xml -o junit_family=legacy)
           fi

           python3 -b -X dev -m pytest \
@@ -1239,7 +1273,7 @@ jobs:
           2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.postgresql }}
@@ -1247,12 +1281,19 @@ jobs:
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: coverage-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.postgresql }}
           path: coverage.xml
           overwrite: true
+      - name: Upload test results artifact
+        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
+        uses: actions/upload-artifact@v4.6.2
+        with:
+          name: test-results-postgres-${{ matrix.python-version }}-${{
+            steps.pytest-partial.outputs.postgresql }}
+          path: junit.xml
       - name: Check dirty
         run: |
           ./script/check_dirty
@@ -1271,12 +1312,12 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.2.1
         with:
           pattern: coverage-*
       - name: Upload coverage to Codecov
         if: needs.info.outputs.test_full_suite == 'true'
-        uses: codecov/codecov-action@v5.3.1
+        uses: codecov/codecov-action@v5.4.0
         with:
           fail_ci_if_error: true
           flags: full-suite
@@ -1285,11 +1326,7 @@ jobs:
   pytest-partial:
     runs-on: ubuntu-24.04
     if: |
-      (github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
-      && github.event.inputs.lint-only != 'true'
-      && github.event.inputs.pylint-only != 'true'
-      && github.event.inputs.mypy-only != 'true'
-      && github.event.inputs.audit-licenses-only != 'true'
+      needs.info.outputs.lint_only != 'true'
       && needs.info.outputs.tests_glob
       && needs.info.outputs.test_full_suite == 'false'
     needs:
@@ -1322,13 +1359,13 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
@@ -1365,6 +1402,7 @@ jobs:
             cov_params+=(--cov="homeassistant.components.${{ matrix.group }}")
             cov_params+=(--cov-report=xml)
             cov_params+=(--cov-report=term-missing)
+            cov_params+=(--junitxml=junit.xml -o junit_family=legacy)
           fi

           python3 -b -X dev -m pytest \
@@ -1382,18 +1420,24 @@ jobs:
           2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
           path: pytest-*.txt
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
           path: coverage.xml
           overwrite: true
+      - name: Upload test results artifact
+        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
+        uses: actions/upload-artifact@v4.6.2
+        with:
+          name: test-results-partial-${{ matrix.python-version }}-${{ matrix.group }}
+          path: junit.xml
       - name: Check dirty
         run: |
           ./script/check_dirty
@@ -1410,12 +1454,37 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.2.1
         with:
           pattern: coverage-*
       - name: Upload coverage to Codecov
         if: needs.info.outputs.test_full_suite == 'false'
-        uses: codecov/codecov-action@v5.3.1
+        uses: codecov/codecov-action@v5.4.0
         with:
           fail_ci_if_error: true
           token: ${{ secrets.CODECOV_TOKEN }}
+
+  upload-test-results:
+    name: Upload test results to Codecov
+    # codecov/test-results-action currently doesn't support tokenless uploads
+    # therefore we can't run it on forks
+    if: ${{ (github.event_name != 'pull_request' || !github.event.pull_request.head.repo.fork) && needs.info.outputs.skip_coverage != 'true' && !cancelled() }}
+    runs-on: ubuntu-24.04
+    needs:
+      - info
+      - pytest-partial
+      - pytest-full
+      - pytest-postgres
+      - pytest-mariadb
+    timeout-minutes: 10
+    steps:
+      - name: Download all coverage artifacts
+        uses: actions/download-artifact@v4.2.1
+        with:
+          pattern: test-results-*
+      - name: Upload test results to Codecov
+        uses: codecov/test-results-action@v1
+        with:
+          fail_ci_if_error: true
+          verbose: true
+          token: ${{ secrets.CODECOV_TOKEN }}
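End-to-end, each pytest job now publishes two artifacts when coverage is enabled: coverage.xml for codecov/codecov-action (unchanged) and the new junit.xml for the upload-test-results job above, which feeds codecov/test-results-action. The -o junit_family=legacy option selects pytest's older xunit1-style report schema.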
.github/workflows/codeql.yml (vendored, 4 lines changed)
@@ -24,11 +24,11 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3.28.10
+        uses: github/codeql-action/init@v3.28.13
         with:
           languages: python

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3.28.10
+        uses: github/codeql-action/analyze@v3.28.13
         with:
           category: "/language:python"
.github/workflows/translations.yml (vendored, 2 lines changed)
@@ -22,7 +22,7 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}

.github/workflows/wheels.yml (vendored, 28 lines changed)
@@ -36,7 +36,7 @@ jobs:

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
@@ -91,7 +91,7 @@ jobs:
           ) > build_constraints.txt

       - name: Upload env_file
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: env_file
           path: ./.env_file
@@ -99,14 +99,14 @@ jobs:
           overwrite: true

       - name: Upload build_constraints
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: build_constraints
           path: ./build_constraints.txt
           overwrite: true

       - name: Upload requirements_diff
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: requirements_diff
           path: ./requirements_diff.txt
@@ -118,7 +118,7 @@ jobs:
           python -m script.gen_requirements_all ci

       - name: Upload requirements_all_wheels
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: requirements_all_wheels
           path: ./requirements_all_wheels_*.txt
@@ -138,17 +138,17 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Download env_file
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.2.1
         with:
           name: env_file

       - name: Download build_constraints
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.2.1
         with:
           name: build_constraints

       - name: Download requirements_diff
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.2.1
         with:
           name: requirements_diff

@@ -159,7 +159,7 @@ jobs:
           sed -i "/uv/d" requirements_diff.txt

       - name: Build wheels
-        uses: home-assistant/wheels@2024.11.0
+        uses: home-assistant/wheels@2025.03.0
         with:
           abi: ${{ matrix.abi }}
           tag: musllinux_1_2
@@ -187,22 +187,22 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Download env_file
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.2.1
         with:
           name: env_file

       - name: Download build_constraints
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.2.1
         with:
           name: build_constraints

       - name: Download requirements_diff
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.2.1
         with:
           name: requirements_diff

       - name: Download requirements_all_wheels
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.2.1
         with:
           name: requirements_all_wheels

@@ -219,7 +219,7 @@ jobs:
           sed -i "/uv/d" requirements_diff.txt

       - name: Build wheels
-        uses: home-assistant/wheels@2024.11.0
+        uses: home-assistant/wheels@2025.03.0
         with:
           abi: ${{ matrix.abi }}
           tag: musllinux_1_2
.gitignore (vendored, 1 line changed)
@@ -69,6 +69,7 @@ test-reports/
 test-results.xml
 test-output.xml
 pytest-*.txt
+junit.xml

 # Translations
 *.mo
.pre-commit-config.yaml

@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.9.7
+    rev: v0.11.0
     hooks:
       - id: ruff
         args:
@ -119,6 +119,7 @@ homeassistant.components.bluetooth_adapters.*
|
|||||||
homeassistant.components.bluetooth_tracker.*
|
homeassistant.components.bluetooth_tracker.*
|
||||||
homeassistant.components.bmw_connected_drive.*
|
homeassistant.components.bmw_connected_drive.*
|
||||||
homeassistant.components.bond.*
|
homeassistant.components.bond.*
|
||||||
|
homeassistant.components.bosch_alarm.*
|
||||||
homeassistant.components.braviatv.*
|
homeassistant.components.braviatv.*
|
||||||
homeassistant.components.bring.*
|
homeassistant.components.bring.*
|
||||||
homeassistant.components.brother.*
|
homeassistant.components.brother.*
|
||||||
@ -136,6 +137,7 @@ homeassistant.components.clicksend.*
|
|||||||
homeassistant.components.climate.*
|
homeassistant.components.climate.*
|
||||||
homeassistant.components.cloud.*
|
homeassistant.components.cloud.*
|
||||||
homeassistant.components.co2signal.*
|
homeassistant.components.co2signal.*
|
||||||
|
homeassistant.components.comelit.*
|
||||||
homeassistant.components.command_line.*
|
homeassistant.components.command_line.*
|
||||||
homeassistant.components.config.*
|
homeassistant.components.config.*
|
||||||
homeassistant.components.configurator.*
|
homeassistant.components.configurator.*
|
||||||
@ -396,6 +398,7 @@ homeassistant.components.pure_energie.*
|
|||||||
homeassistant.components.purpleair.*
|
homeassistant.components.purpleair.*
|
||||||
homeassistant.components.pushbullet.*
|
homeassistant.components.pushbullet.*
|
||||||
homeassistant.components.pvoutput.*
|
homeassistant.components.pvoutput.*
|
||||||
|
homeassistant.components.pyload.*
|
||||||
homeassistant.components.python_script.*
|
homeassistant.components.python_script.*
|
||||||
homeassistant.components.qbus.*
|
homeassistant.components.qbus.*
|
||||||
homeassistant.components.qnap_qsw.*
|
homeassistant.components.qnap_qsw.*
|
||||||
@ -410,6 +413,7 @@ homeassistant.components.recollect_waste.*
|
|||||||
homeassistant.components.recorder.*
|
homeassistant.components.recorder.*
|
||||||
homeassistant.components.remember_the_milk.*
|
homeassistant.components.remember_the_milk.*
|
||||||
homeassistant.components.remote.*
|
homeassistant.components.remote.*
|
||||||
|
homeassistant.components.remote_calendar.*
|
||||||
homeassistant.components.renault.*
|
homeassistant.components.renault.*
|
||||||
homeassistant.components.reolink.*
|
homeassistant.components.reolink.*
|
||||||
homeassistant.components.repairs.*
|
homeassistant.components.repairs.*
|
||||||
@ -528,6 +532,7 @@ homeassistant.components.vallox.*
|
|||||||
homeassistant.components.valve.*
|
homeassistant.components.valve.*
|
||||||
homeassistant.components.velbus.*
|
homeassistant.components.velbus.*
|
||||||
homeassistant.components.vlc_telnet.*
|
homeassistant.components.vlc_telnet.*
|
||||||
|
homeassistant.components.vodafone_station.*
|
||||||
homeassistant.components.wake_on_lan.*
|
homeassistant.components.wake_on_lan.*
|
||||||
homeassistant.components.wake_word.*
|
homeassistant.components.wake_word.*
|
||||||
homeassistant.components.wallbox.*
|
homeassistant.components.wallbox.*
|
||||||
2 .vscode/tasks.json vendored
@@ -4,7 +4,7 @@
 {
   "label": "Run Home Assistant Core",
   "type": "shell",
-  "command": "hass -c ./config",
+  "command": "${command:python.interpreterPath} -m homeassistant -c ./config",
   "group": "test",
   "presentation": {
     "reveal": "always",
16 CODEOWNERS generated
@@ -216,6 +216,8 @@ build.json @home-assistant/supervisor
 /tests/components/bmw_connected_drive/ @gerard33 @rikroe
 /homeassistant/components/bond/ @bdraco @prystupa @joshs85 @marciogranzotto
 /tests/components/bond/ @bdraco @prystupa @joshs85 @marciogranzotto
+/homeassistant/components/bosch_alarm/ @mag1024 @sanjay900
+/tests/components/bosch_alarm/ @mag1024 @sanjay900
 /homeassistant/components/bosch_shc/ @tschamm
 /tests/components/bosch_shc/ @tschamm
 /homeassistant/components/braviatv/ @bieniu @Drafteed
@@ -570,8 +572,8 @@ build.json @home-assistant/supervisor
 /tests/components/google_cloud/ @lufton @tronikos
 /homeassistant/components/google_drive/ @tronikos
 /tests/components/google_drive/ @tronikos
-/homeassistant/components/google_generative_ai_conversation/ @tronikos
-/tests/components/google_generative_ai_conversation/ @tronikos
+/homeassistant/components/google_generative_ai_conversation/ @tronikos @ivanlh
+/tests/components/google_generative_ai_conversation/ @tronikos @ivanlh
 /homeassistant/components/google_mail/ @tkdrob
 /tests/components/google_mail/ @tkdrob
 /homeassistant/components/google_photos/ @allenporter
@@ -1183,6 +1185,8 @@ build.json @home-assistant/supervisor
 /tests/components/prusalink/ @balloob
 /homeassistant/components/ps4/ @ktnrg45
 /tests/components/ps4/ @ktnrg45
+/homeassistant/components/pterodactyl/ @elmurato
+/tests/components/pterodactyl/ @elmurato
 /homeassistant/components/pure_energie/ @klaasnicolaas
 /tests/components/pure_energie/ @klaasnicolaas
 /homeassistant/components/purpleair/ @bachya
@@ -1252,6 +1256,8 @@ build.json @home-assistant/supervisor
 /tests/components/refoss/ @ashionky
 /homeassistant/components/remote/ @home-assistant/core
 /tests/components/remote/ @home-assistant/core
+/homeassistant/components/remote_calendar/ @Thomas55555
+/tests/components/remote_calendar/ @Thomas55555
 /homeassistant/components/renault/ @epenet
 /tests/components/renault/ @epenet
 /homeassistant/components/renson/ @jimmyd-be
@@ -1474,8 +1480,6 @@ build.json @home-assistant/supervisor
 /tests/components/suez_water/ @ooii @jb101010-2
 /homeassistant/components/sun/ @Swamp-Ig
 /tests/components/sun/ @Swamp-Ig
-/homeassistant/components/sunweg/ @rokam
-/tests/components/sunweg/ @rokam
 /homeassistant/components/supla/ @mwegrzynek
 /homeassistant/components/surepetcare/ @benleb @danielhiversen
 /tests/components/surepetcare/ @benleb @danielhiversen
@@ -1529,8 +1533,8 @@ build.json @home-assistant/supervisor
 /tests/components/tedee/ @patrickhilker @zweckj
 /homeassistant/components/tellduslive/ @fredrike
 /tests/components/tellduslive/ @fredrike
-/homeassistant/components/template/ @PhracturedBlue @home-assistant/core
-/tests/components/template/ @PhracturedBlue @home-assistant/core
+/homeassistant/components/template/ @Petro31 @PhracturedBlue @home-assistant/core
+/tests/components/template/ @Petro31 @PhracturedBlue @home-assistant/core
 /homeassistant/components/tesla_fleet/ @Bre77
 /tests/components/tesla_fleet/ @Bre77
 /homeassistant/components/tesla_wall_connector/ @einarhauks
4 Dockerfile generated
@@ -25,13 +25,13 @@ RUN \
     "armv7") go2rtc_suffix='arm' ;; \
     *) go2rtc_suffix=${BUILD_ARCH} ;; \
     esac \
-    && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.8/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
+    && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.9/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
     && chmod +x /bin/go2rtc \
     # Verify go2rtc can be executed
     && go2rtc --version

 # Install uv
-RUN pip3 install uv==0.6.1
+RUN pip3 install uv==0.6.10

 WORKDIR /usr/src

@@ -19,4 +19,4 @@ labels:
   org.opencontainers.image.authors: The Home Assistant Authors
   org.opencontainers.image.url: https://www.home-assistant.io/
   org.opencontainers.image.documentation: https://www.home-assistant.io/docs/
-  org.opencontainers.image.licenses: Apache License 2.0
+  org.opencontainers.image.licenses: Apache-2.0
@@ -178,6 +178,15 @@ _BLOCKING_CALLS: tuple[BlockingCall, ...] = (
         strict_core=False,
         skip_for_tests=True,
     ),
+    BlockingCall(
+        original_func=SSLContext.set_default_verify_paths,
+        object=SSLContext,
+        function="set_default_verify_paths",
+        check_allowed=None,
+        strict=False,
+        strict_core=False,
+        skip_for_tests=True,
+    ),
     BlockingCall(
         original_func=Path.open,
         object=Path,
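The new entry above teaches the blocking-call detector about SSLContext.set_default_verify_paths, which reads the system CA bundle from disk and therefore must not run on the event loop. A minimal sketch of the safe pattern this detection nudges callers toward, using stdlib names only (the helper names here are illustrative, not from the commit):

    import asyncio
    import ssl


    def _build_client_context() -> ssl.SSLContext:
        context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
        context.set_default_verify_paths()  # blocking: reads CA files from disk
        return context


    async def get_client_context() -> ssl.SSLContext:
        # Offload the blocking construction to the default thread pool executor
        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(None, _build_client_context)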
@@ -81,6 +81,7 @@ from .helpers import (
     entity,
     entity_registry,
     floor_registry,
+    frame,
     issue_registry,
     label_registry,
     recorder,
@@ -92,6 +93,7 @@ from .helpers.dispatcher import async_dispatcher_send_internal
 from .helpers.storage import get_internal_store_manager
 from .helpers.system_info import async_get_system_info
 from .helpers.typing import ConfigType
+from .loader import Integration
 from .setup import (
     # _setup_started is marked as protected to make it clear
     # that it is not part of the public API and should not be used
@@ -298,14 +300,6 @@ async def async_setup_hass(

         return hass


-    async def stop_hass(hass: core.HomeAssistant) -> None:
-        """Stop hass."""
-        # Ask integrations to shut down. It's messy but we can't
-        # do a clean stop without knowing what is broken
-        with contextlib.suppress(TimeoutError):
-            async with hass.timeout.async_timeout(10):
-                await hass.async_stop()
-
     hass = await create_hass()

     if runtime_config.skip_pip or runtime_config.skip_pip_packages:
@@ -344,7 +338,7 @@ async def async_setup_hass(

     if config_dict is None:
         recovery_mode = True
-        await stop_hass(hass)
+        await hass.async_stop(force=True)
         hass = await create_hass()

     elif not basic_setup_success:
@@ -352,7 +346,7 @@ async def async_setup_hass(
             "Unable to set up core integrations. Activating recovery mode"
         )
         recovery_mode = True
-        await stop_hass(hass)
+        await hass.async_stop(force=True)
         hass = await create_hass()

     elif any(
@@ -367,7 +361,7 @@ async def async_setup_hass(
         old_logging = hass.data.get(DATA_LOGGING)

         recovery_mode = True
-        await stop_hass(hass)
+        await hass.async_stop(force=True)
         hass = await create_hass()

         if old_logging:
@@ -441,9 +435,10 @@ async def async_load_base_functionality(hass: core.HomeAssistant) -> None:
     if DATA_REGISTRIES_LOADED in hass.data:
         return
     hass.data[DATA_REGISTRIES_LOADED] = None
-    translation.async_setup(hass)
     entity.async_setup(hass)
+    frame.async_setup(hass)
     template.async_setup(hass)
+    translation.async_setup(hass)
     await asyncio.gather(
         create_eager_task(get_internal_store_manager(hass).async_initialize()),
         create_eager_task(area_registry.async_load(hass)),
@@ -664,11 +659,10 @@ def _create_log_file(
     err_handler = _RotatingFileHandlerWithoutShouldRollOver(
         err_log_path, backupCount=1
     )
-
     try:
         err_handler.doRollover()
     except OSError as err:
         _LOGGER.error("Error rolling over log file: %s", err)

     return err_handler
@@ -718,20 +712,25 @@ def _get_domains(hass: core.HomeAssistant, config: dict[str, Any]) -> set[str]:
     return domains


-async def _async_resolve_domains_to_setup(
+async def _async_resolve_domains_and_preload(
     hass: core.HomeAssistant, config: dict[str, Any]
-) -> tuple[set[str], dict[str, loader.Integration]]:
-    """Resolve all dependencies and return list of domains to set up."""
+) -> tuple[dict[str, Integration], dict[str, Integration]]:
+    """Resolve all dependencies and return integrations to set up.
+
+    The return value is a tuple of two dictionaries:
+    - The first dictionary contains integrations
+      specified by the configuration (including config entries).
+    - The second dictionary contains the same integrations as the first dictionary
+      together with all their dependencies.
+    """
     domains_to_setup = _get_domains(hass, config)
-    needed_requirements: set[str] = set()
     platform_integrations = conf_util.extract_platform_integrations(
         config, BASE_PLATFORMS
     )
-    # Ensure base platforms that have platform integrations are added to
-    # to `domains_to_setup so they can be setup first instead of
-    # discovering them when later when a config entry setup task
-    # notices its needed and there is already a long line to use
-    # the import executor.
+    # Ensure base platforms that have platform integrations are added to `domains`,
+    # so they can be setup first instead of discovering them later when a config
+    # entry setup task notices that it's needed and there is already a long line
+    # to use the import executor.
     #
     # For example if we have
     # sensor:
@@ -747,111 +746,78 @@ async def _async_resolve_domains_to_setup(
     # so this will be less of a problem in the future.
     domains_to_setup.update(platform_integrations)

-    # Load manifests for base platforms and platform based integrations
-    # that are defined under base platforms right away since we do not require
-    # the manifest to list them as dependencies and we want to avoid the lock
-    # contention when multiple integrations try to load them at once
-    additional_manifests_to_load = {
+    # Additionally process base platforms since we do not require the manifest
+    # to list them as dependencies.
+    # We want to later avoid lock contention when multiple integrations try to load
+    # their manifests at once.
+    # Also process integrations that are defined under base platforms
+    # to speed things up.
+    additional_domains_to_process = {
         *BASE_PLATFORMS,
         *chain.from_iterable(platform_integrations.values()),
     }

-    translations_to_load = additional_manifests_to_load.copy()
-
     # Resolve all dependencies so we know all integrations
     # that will have to be loaded and start right-away
-    integration_cache: dict[str, loader.Integration] = {}
-    to_resolve: set[str] = domains_to_setup
-    while to_resolve or additional_manifests_to_load:
-        old_to_resolve: set[str] = to_resolve
-        to_resolve = set()
-
-        if additional_manifests_to_load:
-            to_get = {*old_to_resolve, *additional_manifests_to_load}
-            additional_manifests_to_load.clear()
-        else:
-            to_get = old_to_resolve
-
-        manifest_deps: set[str] = set()
-        resolve_dependencies_tasks: list[asyncio.Task[bool]] = []
-        integrations_to_process: list[loader.Integration] = []
-
-        for domain, itg in (await loader.async_get_integrations(hass, to_get)).items():
-            if not isinstance(itg, loader.Integration):
-                continue
-            integration_cache[domain] = itg
-            needed_requirements.update(itg.requirements)
-
-            # Make sure manifests for dependencies are loaded in the next
-            # loop to try to group as many as manifest loads in a single
-            # call to avoid the creating one-off executor jobs later in
-            # the setup process
-            additional_manifests_to_load.update(
-                dep
-                for dep in chain(itg.dependencies, itg.after_dependencies)
-                if dep not in integration_cache
-            )
-
-            if domain not in old_to_resolve:
-                continue
-
-            integrations_to_process.append(itg)
-            manifest_deps.update(itg.dependencies)
-            manifest_deps.update(itg.after_dependencies)
-            if not itg.all_dependencies_resolved:
-                resolve_dependencies_tasks.append(
-                    create_eager_task(
-                        itg.resolve_dependencies(),
-                        name=f"resolve dependencies {domain}",
-                        loop=hass.loop,
-                    )
-                )
-
-        if unseen_deps := manifest_deps - integration_cache.keys():
-            # If there are dependencies, try to preload all
-            # the integrations manifest at once and add them
-            # to the list of requirements we need to install
-            # so we can try to check if they are already installed
-            # in a single call below which avoids each integration
-            # having to wait for the lock to do it individually
-            deps = await loader.async_get_integrations(hass, unseen_deps)
-            for dependant_domain, dependant_itg in deps.items():
-                if isinstance(dependant_itg, loader.Integration):
-                    integration_cache[dependant_domain] = dependant_itg
-                    needed_requirements.update(dependant_itg.requirements)
-
-        if resolve_dependencies_tasks:
-            await asyncio.gather(*resolve_dependencies_tasks)
-
-        for itg in integrations_to_process:
-            try:
-                all_deps = itg.all_dependencies
-            except RuntimeError:
-                # Integration.all_dependencies raises RuntimeError if
-                # dependencies could not be resolved
-                continue
-            for dep in all_deps:
-                if dep in domains_to_setup:
-                    continue
-                domains_to_setup.add(dep)
-                to_resolve.add(dep)
-
-    _LOGGER.info("Domains to be set up: %s", domains_to_setup)
-
+    integrations_or_excs = await loader.async_get_integrations(
+        hass, {*domains_to_setup, *additional_domains_to_process}
+    )
+    # Eliminate those missing or with invalid manifest
+    integrations_to_process = {
+        domain: itg
+        for domain, itg in integrations_or_excs.items()
+        if isinstance(itg, Integration)
+    }
+    integrations_dependencies = await loader.resolve_integrations_dependencies(
+        hass, integrations_to_process.values()
+    )
+    # Eliminate those without valid dependencies
+    integrations_to_process = {
+        domain: integrations_to_process[domain] for domain in integrations_dependencies
+    }
+
+    integrations_to_setup = {
+        domain: itg
+        for domain, itg in integrations_to_process.items()
+        if domain in domains_to_setup
+    }
+    all_integrations_to_setup = integrations_to_setup.copy()
+    all_integrations_to_setup.update(
+        (dep, loader.async_get_loaded_integration(hass, dep))
+        for domain in integrations_to_setup
+        for dep in integrations_dependencies[domain].difference(
+            all_integrations_to_setup
+        )
+    )
+
+    # Gather requirements for all integrations,
+    # their dependencies and after dependencies.
+    # To gather all the requirements we must ignore exceptions here.
+    # The exceptions will be detected and handled later in the bootstrap process.
+    integrations_after_dependencies = (
+        await loader.resolve_integrations_after_dependencies(
+            hass, integrations_to_process.values(), ignore_exceptions=True
+        )
+    )
+    integrations_requirements = {
+        domain: itg.requirements for domain, itg in integrations_to_process.items()
+    }
+    integrations_requirements.update(
+        (dep, loader.async_get_loaded_integration(hass, dep).requirements)
+        for deps in integrations_after_dependencies.values()
+        for dep in deps.difference(integrations_requirements)
+    )
+    all_requirements = set(chain.from_iterable(integrations_requirements.values()))
+
     # Optimistically check if requirements are already installed
     # ahead of setting up the integrations so we can prime the cache
-    # We do not wait for this since its an optimization only
+    # We do not wait for this since it's an optimization only
     hass.async_create_background_task(
-        requirements.async_load_installed_versions(hass, needed_requirements),
+        requirements.async_load_installed_versions(hass, all_requirements),
         "check installed requirements",
         eager_start=True,
     )

-    #
-    # Only add the domains_to_setup after we finish resolving
-    # as new domains are likely to added in the process
-    #
-    translations_to_load.update(domains_to_setup)
     # Start loading translations for all integrations we are going to set up
     # in the background so they are ready when we need them. This avoids a
     # lot of waiting for the translation load lock and a thundering herd of
@@ -862,6 +828,7 @@ async def _async_resolve_domains_to_setup(
     # hold the translation load lock and if anything is fast enough to
     # wait for the translation load lock, loading will be done by the
     # time it gets to it.
+    translations_to_load = {*all_integrations_to_setup, *additional_domains_to_process}
     hass.async_create_background_task(
         translation.async_load_integrations(hass, translations_to_load),
         "load translations",
@@ -873,13 +840,13 @@ async def _async_resolve_domains_to_setup(
     # in the setup process.
     hass.async_create_background_task(
         get_internal_store_manager(hass).async_preload(
-            [*PRELOAD_STORAGE, *domains_to_setup]
+            [*PRELOAD_STORAGE, *all_integrations_to_setup]
         ),
         "preload storage",
         eager_start=True,
     )

-    return domains_to_setup, integration_cache
+    return integrations_to_setup, all_integrations_to_setup


 async def _async_set_up_integrations(
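A sketch of the contract the renamed helper now exposes: the first mapping holds the integrations named by the configuration, the second adds every resolved dependency, so the first is always a subset of the second. Toy string values stand in for the real loader.Integration objects:

    # Illustrative values only; keys are domains, values are Integration objects.
    integrations = {"sensor": "<Integration sensor>", "mqtt": "<Integration mqtt>"}
    all_integrations = {
        **integrations,
        "http": "<Integration http>",  # dependency pulled in during resolution
    }

    assert set(integrations) <= set(all_integrations)
    dependency_only = set(all_integrations) - set(integrations)  # {"http"}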
@@ -889,69 +856,90 @@ async def _async_set_up_integrations(
     watcher = _WatchPendingSetups(hass, _setup_started(hass))
     watcher.async_start()

-    domains_to_setup, integration_cache = await _async_resolve_domains_to_setup(
+    integrations, all_integrations = await _async_resolve_domains_and_preload(
         hass, config
     )
-    stage_2_domains = domains_to_setup.copy()
+    all_domains = set(all_integrations)
+    domains = set(integrations)
+
+    _LOGGER.info(
+        "Domains to be set up: %s | %s",
+        domains,
+        all_domains - domains,
+    )

     # Initialize recorder
-    if "recorder" in domains_to_setup:
+    if "recorder" in all_domains:
         recorder.async_initialize_recorder(hass)

     # Initialize backup
-    if "backup" in domains_to_setup:
+    if "backup" in all_domains:
         backup.async_initialize_backup(hass)

-    stage_0_and_1_domains: list[tuple[str, set[str], int | None]] = [
+    stages: list[tuple[str, set[str], int | None]] = [
         *(
-            (name, domain_group & domains_to_setup, timeout)
+            (name, domain_group, timeout)
             for name, domain_group, timeout in STAGE_0_INTEGRATIONS
         ),
-        ("stage 1", STAGE_1_INTEGRATIONS & domains_to_setup, STAGE_1_TIMEOUT),
+        ("1", STAGE_1_INTEGRATIONS, STAGE_1_TIMEOUT),
+        ("2", domains, STAGE_2_TIMEOUT),
     ]

-    _LOGGER.info("Setting up stage 0 and 1")
-    for name, domain_group, timeout in stage_0_and_1_domains:
-        if not domain_group:
+    _LOGGER.info("Setting up stage 0")
+    for name, domain_group, timeout in stages:
+        stage_domains_unfiltered = domain_group & all_domains
+        if not stage_domains_unfiltered:
+            _LOGGER.info("Nothing to set up in stage %s: %s", name, domain_group)
             continue

-        _LOGGER.info("Setting up %s: %s", name, domain_group)
-        to_be_loaded = domain_group.copy()
-        to_be_loaded.update(
+        stage_domains = stage_domains_unfiltered - hass.config.components
+        if not stage_domains:
+            _LOGGER.info("Already set up stage %s: %s", name, stage_domains_unfiltered)
+            continue
+
+        stage_dep_domains_unfiltered = {
             dep
-            for domain in domain_group
-            if (integration := integration_cache.get(domain)) is not None
-            for dep in integration.all_dependencies
+            for domain in stage_domains
+            for dep in all_integrations[domain].all_dependencies
+            if dep not in stage_domains
+        }
+        stage_dep_domains = stage_dep_domains_unfiltered - hass.config.components
+
+        stage_all_domains = stage_domains | stage_dep_domains
+        stage_all_integrations = {
+            domain: all_integrations[domain] for domain in stage_all_domains
+        }
+        # Detect all cycles
+        stage_integrations_after_dependencies = (
+            await loader.resolve_integrations_after_dependencies(
+                hass, stage_all_integrations.values(), stage_all_domains
+            )
         )
-        async_set_domains_to_be_loaded(hass, to_be_loaded)
-        stage_2_domains -= to_be_loaded
+        stage_all_domains = set(stage_integrations_after_dependencies)
+        stage_domains &= stage_all_domains
+        stage_dep_domains &= stage_all_domains
+
+        _LOGGER.info(
+            "Setting up stage %s: %s | %s\nDependencies: %s | %s",
+            name,
+            stage_domains,
+            stage_domains_unfiltered - stage_domains,
+            stage_dep_domains,
+            stage_dep_domains_unfiltered - stage_dep_domains,
+        )
+
+        async_set_domains_to_be_loaded(hass, stage_all_domains)

         if timeout is None:
-            await _async_setup_multi_components(hass, domain_group, config)
-        else:
-            try:
-                async with hass.timeout.async_timeout(timeout, cool_down=COOLDOWN_TIME):
-                    await _async_setup_multi_components(hass, domain_group, config)
-            except TimeoutError:
-                _LOGGER.warning(
-                    "Setup timed out for %s waiting on %s - moving forward",
-                    name,
-                    hass._active_tasks,  # noqa: SLF001
-                )
-
-    # Add after dependencies when setting up stage 2 domains
-    async_set_domains_to_be_loaded(hass, stage_2_domains)
-
-    if stage_2_domains:
-        _LOGGER.info("Setting up stage 2: %s", stage_2_domains)
-        try:
-            async with hass.timeout.async_timeout(
-                STAGE_2_TIMEOUT, cool_down=COOLDOWN_TIME
-            ):
-                await _async_setup_multi_components(hass, stage_2_domains, config)
-        except TimeoutError:
-            _LOGGER.warning(
-                "Setup timed out for stage 2 waiting on %s - moving forward",
-                hass._active_tasks,  # noqa: SLF001
-            )
+            await _async_setup_multi_components(hass, stage_all_domains, config)
+            continue
+        try:
+            async with hass.timeout.async_timeout(timeout, cool_down=COOLDOWN_TIME):
+                await _async_setup_multi_components(hass, stage_all_domains, config)
+        except TimeoutError:
+            _LOGGER.warning(
+                "Setup timed out for stage %s waiting on %s - moving forward",
+                name,
+                hass._active_tasks,  # noqa: SLF001
+            )
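The per-stage filtering introduced above is plain set algebra; a self-contained sketch with toy domain names (the stage assignments here are invented for illustration, not taken from the real STAGE_* constants):

    domain_group = {"frontend", "recorder", "zone"}   # domains assigned to the stage
    all_domains = {"frontend", "recorder", "http"}    # everything resolved for setup
    already_set_up = {"http"}                         # stand-in for hass.config.components

    stage_domains_unfiltered = domain_group & all_domains   # drop unresolved domains
    stage_domains = stage_domains_unfiltered - already_set_up
    # Dependencies of the stage's domains then join the stage, and the union is
    # re-checked for dependency cycles before any setup tasks are created.
    print(sorted(stage_domains))  # ['frontend', 'recorder']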
@@ -1053,8 +1041,6 @@ async def _async_setup_multi_components(
     config: dict[str, Any],
 ) -> None:
     """Set up multiple domains. Log on failure."""
-    # Avoid creating tasks for domains that were setup in a previous stage
-    domains_not_yet_setup = domains - hass.config.components
     # Create setup tasks for base platforms first since everything will have
     # to wait to be imported, and the sooner we can get the base platforms
     # loaded the sooner we can start loading the rest of the integrations.
@@ -1064,9 +1050,7 @@ async def _async_setup_multi_components(
             f"setup component {domain}",
             eager_start=True,
         )
-        for domain in sorted(
-            domains_not_yet_setup, key=SETUP_ORDER_SORT_KEY, reverse=True
-        )
+        for domain in sorted(domains, key=SETUP_ORDER_SORT_KEY, reverse=True)
     }
     results = await asyncio.gather(*futures.values(), return_exceptions=True)
     for idx, domain in enumerate(futures):
5 homeassistant/brands/bosch.json Normal file
@@ -0,0 +1,5 @@
+{
+  "domain": "bosch",
+  "name": "Bosch",
+  "integrations": ["bosch_alarm", "bosch_shc", "home_connect"]
+}

5 homeassistant/brands/eve.json Normal file
@@ -0,0 +1,5 @@
+{
+  "domain": "eve",
+  "name": "Eve",
+  "iot_standards": ["matter"]
+}
@@ -1,5 +1,6 @@
 {
   "domain": "motionblinds",
   "name": "Motionblinds",
-  "integrations": ["motion_blinds", "motionblinds_ble"]
+  "integrations": ["motion_blinds", "motionblinds_ble"],
+  "iot_standards": ["matter"]
 }
@@ -24,7 +24,7 @@ from homeassistant.components.weather import (

 API_METRIC: Final = "Metric"
 ATTRIBUTION: Final = "Data provided by AccuWeather"
-ATTR_CATEGORY: Final = "Category"
+ATTR_CATEGORY_VALUE = "CategoryValue"
 ATTR_DIRECTION: Final = "Direction"
 ATTR_ENGLISH: Final = "English"
 ATTR_LEVEL: Final = "level"
@@ -55,5 +55,18 @@ CONDITION_MAP = {
     for cond_ha, cond_codes in CONDITION_CLASSES.items()
     for cond_code in cond_codes
 }
+AIR_QUALITY_CATEGORY_MAP = {
+    1: "good",
+    2: "moderate",
+    3: "unhealthy",
+    4: "very_unhealthy",
+    5: "hazardous",
+}
+POLLEN_CATEGORY_MAP = {
+    1: "low",
+    2: "moderate",
+    3: "high",
+    4: "very_high",
+}
 UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=40)
 UPDATE_INTERVAL_DAILY_FORECAST = timedelta(hours=6)
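The two maps above key off the API's numeric CategoryValue rather than its Category text, which can plausibly come back localized now that the forecast request below passes the configured language. A sketch with an assumed payload shape (field names mirror the AccuWeather response; the sample values are invented):

    AIR_QUALITY_CATEGORY_MAP = {1: "good", 2: "moderate", 3: "unhealthy",
                                4: "very_unhealthy", 5: "hazardous"}

    sample = {"CategoryValue": 2, "Category": "Modérée"}  # illustrative payload
    state = AIR_QUALITY_CATEGORY_MAP[sample["CategoryValue"]]  # "moderate", locale-free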
@@ -75,7 +75,11 @@ class AccuWeatherObservationDataUpdateCoordinator(
             async with timeout(10):
                 result = await self.accuweather.async_get_current_conditions()
         except EXCEPTIONS as error:
-            raise UpdateFailed(error) from error
+            raise UpdateFailed(
+                translation_domain=DOMAIN,
+                translation_key="current_conditions_update_error",
+                translation_placeholders={"error": repr(error)},
+            ) from error

         _LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining)

@@ -117,9 +121,15 @@ class AccuWeatherDailyForecastDataUpdateCoordinator(
         """Update data via library."""
         try:
             async with timeout(10):
-                result = await self.accuweather.async_get_daily_forecast()
+                result = await self.accuweather.async_get_daily_forecast(
+                    language=self.hass.config.language
+                )
         except EXCEPTIONS as error:
-            raise UpdateFailed(error) from error
+            raise UpdateFailed(
+                translation_domain=DOMAIN,
+                translation_key="forecast_update_error",
+                translation_placeholders={"error": repr(error)},
+            ) from error

         _LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining)
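How the translation-aware UpdateFailed raised above becomes user-facing text is simple placeholder substitution against the "exceptions" section added to strings.json further down; a standalone sketch (the error value is invented):

    # Message template copied from the strings.json hunk below.
    template = (
        "An error occurred while retrieving weather forecast data "
        "from the AccuWeather API: {error}"
    )
    placeholders = {"error": "ClientConnectorError('cannot connect')"}
    print(template.format(**placeholders))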
@@ -7,6 +7,6 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["accuweather"],
-  "requirements": ["accuweather==4.1.0"],
+  "requirements": ["accuweather==4.2.0"],
   "single_config_entry": true
 }
@@ -29,8 +29,9 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.helpers.update_coordinator import CoordinatorEntity

 from .const import (
+    AIR_QUALITY_CATEGORY_MAP,
     API_METRIC,
-    ATTR_CATEGORY,
+    ATTR_CATEGORY_VALUE,
     ATTR_DIRECTION,
     ATTR_ENGLISH,
     ATTR_LEVEL,
@@ -38,6 +39,7 @@ from .const import (
     ATTR_VALUE,
     ATTRIBUTION,
     MAX_FORECAST_DAYS,
+    POLLEN_CATEGORY_MAP,
 )
 from .coordinator import (
     AccuWeatherConfigEntry,
@@ -59,9 +61,9 @@ class AccuWeatherSensorDescription(SensorEntityDescription):
 FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
     AccuWeatherSensorDescription(
         key="AirQuality",
-        value_fn=lambda data: cast(str, data[ATTR_CATEGORY]),
+        value_fn=lambda data: AIR_QUALITY_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]],
         device_class=SensorDeviceClass.ENUM,
-        options=["good", "hazardous", "high", "low", "moderate", "unhealthy"],
+        options=list(AIR_QUALITY_CATEGORY_MAP.values()),
         translation_key="air_quality",
     ),
     AccuWeatherSensorDescription(
@@ -83,7 +85,9 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
         entity_registry_enabled_default=False,
         native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
         value_fn=lambda data: cast(int, data[ATTR_VALUE]),
-        attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
+        attr_fn=lambda data: {
+            ATTR_LEVEL: POLLEN_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]]
+        },
         translation_key="grass_pollen",
     ),
     AccuWeatherSensorDescription(
@@ -107,7 +111,9 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
         entity_registry_enabled_default=False,
         native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
         value_fn=lambda data: cast(int, data[ATTR_VALUE]),
-        attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
+        attr_fn=lambda data: {
+            ATTR_LEVEL: POLLEN_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]]
+        },
         translation_key="mold_pollen",
     ),
     AccuWeatherSensorDescription(
@@ -115,7 +121,9 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
         native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
         entity_registry_enabled_default=False,
         value_fn=lambda data: cast(int, data[ATTR_VALUE]),
-        attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
+        attr_fn=lambda data: {
+            ATTR_LEVEL: POLLEN_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]]
+        },
         translation_key="ragweed_pollen",
     ),
     AccuWeatherSensorDescription(
@@ -181,14 +189,18 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
         native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
         entity_registry_enabled_default=False,
         value_fn=lambda data: cast(int, data[ATTR_VALUE]),
-        attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
+        attr_fn=lambda data: {
+            ATTR_LEVEL: POLLEN_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]]
+        },
         translation_key="tree_pollen",
     ),
     AccuWeatherSensorDescription(
         key="UVIndex",
         native_unit_of_measurement=UV_INDEX,
         value_fn=lambda data: cast(int, data[ATTR_VALUE]),
-        attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
+        attr_fn=lambda data: {
+            ATTR_LEVEL: POLLEN_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]]
+        },
         translation_key="uv_index_forecast",
     ),
     AccuWeatherSensorDescription(
@@ -26,10 +26,20 @@
         "state": {
           "good": "Good",
           "hazardous": "Hazardous",
-          "high": "High",
-          "low": "Low",
           "moderate": "Moderate",
-          "unhealthy": "Unhealthy"
+          "unhealthy": "Unhealthy",
+          "very_unhealthy": "Very unhealthy"
+        },
+        "state_attributes": {
+          "options": {
+            "state": {
+              "good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
+              "hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
+              "moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
+              "unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]",
+              "very_unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::very_unhealthy%]"
+            }
+          }
         }
       },
       "apparent_temperature": {
@@ -62,12 +72,10 @@
           "level": {
             "name": "Level",
             "state": {
-              "good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
-              "hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
-              "high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
-              "low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
-              "moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
-              "unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
+              "high": "High",
+              "low": "Low",
+              "moderate": "Moderate",
+              "very_high": "Very high"
             }
           }
         }
@@ -81,12 +89,10 @@
           "level": {
             "name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
             "state": {
-              "good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
-              "hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
-              "high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
-              "low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
-              "moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
-              "unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
+              "high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::high%]",
+              "low": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::low%]",
+              "moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
+              "very_high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::very_high%]"
             }
           }
         }
@@ -100,6 +106,15 @@
           "steady": "Steady",
           "rising": "Rising",
           "falling": "Falling"
+        },
+        "state_attributes": {
+          "options": {
+            "state": {
+              "falling": "[%key:component::accuweather::entity::sensor::pressure_tendency::state::falling%]",
+              "rising": "[%key:component::accuweather::entity::sensor::pressure_tendency::state::rising%]",
+              "steady": "[%key:component::accuweather::entity::sensor::pressure_tendency::state::steady%]"
+            }
+          }
         }
       },
       "ragweed_pollen": {
@@ -108,12 +123,10 @@
           "level": {
             "name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
             "state": {
-              "good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
-              "hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
-              "high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
-              "low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
-              "moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
-              "unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
+              "high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::high%]",
+              "low": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::low%]",
+              "moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
+              "very_high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::very_high%]"
            }
          }
        }
@@ -154,12 +167,10 @@
           "level": {
             "name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
             "state": {
-              "good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
-              "hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
-              "high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
-              "low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
-              "moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
-              "unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
+              "high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::high%]",
+              "low": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::low%]",
+              "moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
+              "very_high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::very_high%]"
             }
           }
         }
@@ -170,12 +181,10 @@
           "level": {
             "name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
             "state": {
-              "good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
-              "hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
-              "high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
-              "low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
-              "moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
-              "unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
+              "high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::high%]",
+              "low": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::low%]",
+              "moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
+              "very_high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::very_high%]"
             }
           }
         }
@@ -186,12 +195,10 @@
           "level": {
             "name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
             "state": {
-              "good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
-              "hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
-              "high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
-              "low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
-              "moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
-              "unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
+              "high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::high%]",
+              "low": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::low%]",
+              "moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
+              "very_high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::very_high%]"
             }
           }
         }
@@ -222,6 +229,14 @@
         }
       }
     },
+    "exceptions": {
+      "current_conditions_update_error": {
+        "message": "An error occurred while retrieving weather current conditions data from the AccuWeather API: {error}"
+      },
+      "forecast_update_error": {
+        "message": "An error occurred while retrieving weather forecast data from the AccuWeather API: {error}"
+      }
+    },
     "system_health": {
       "info": {
         "can_reach_server": "Reach AccuWeather server",
@@ -5,14 +5,14 @@
       "data": {
         "connection_type": "Select connection type"
       },
-      "description": "Select connection type. Local requires heaters with bluetooth"
+      "description": "Select connection type. Local requires heaters with Bluetooth"
     },
     "local": {
       "data": {
         "wifi_ssid": "Wi-Fi SSID",
-        "wifi_pswd": "Wi-Fi Password"
+        "wifi_pswd": "Wi-Fi password"
      },
-      "description": "Reset the heater by pressing + and OK until display shows 'Reset'. Then press and hold OK button on the heater until the blue led starts blinking before pressing Submit. Configuring heater might take some minutes."
+      "description": "Reset the heater by pressing + and OK until display shows 'Reset'. Then press and hold OK button on the heater until the blue LED starts blinking before pressing Submit. Configuring heater might take some minutes."
     },
     "cloud": {
       "data": {
|
@ -2,6 +2,7 @@

from __future__ import annotations

from decimal import Decimal
import logging
from typing import Any

@ -14,6 +15,7 @@ from homeassistant.components.climate import (
    FAN_MEDIUM,
    ClimateEntity,
    ClimateEntityFeature,
    HVACAction,
    HVACMode,
)
from homeassistant.const import ATTR_TEMPERATURE, PRECISION_WHOLE, UnitOfTemperature
@ -49,6 +51,14 @@ ADVANTAGE_AIR_MYTEMP_ENABLED = "climateControlModeEnabled"
ADVANTAGE_AIR_HEAT_TARGET = "myAutoHeatTargetTemp"
ADVANTAGE_AIR_COOL_TARGET = "myAutoCoolTargetTemp"
ADVANTAGE_AIR_MYFAN = "autoAA"
ADVANTAGE_AIR_MYAUTO_MODE_SET = "myAutoModeCurrentSetMode"

HVAC_ACTIONS = {
    "cool": HVACAction.COOLING,
    "heat": HVACAction.HEATING,
    "vent": HVACAction.FAN,
    "dry": HVACAction.DRYING,
}

HVAC_MODES = [
    HVACMode.OFF,
@ -175,6 +185,17 @@ class AdvantageAirAC(AdvantageAirAcEntity, ClimateEntity):
            return ADVANTAGE_AIR_HVAC_MODES.get(self._ac["mode"])
        return HVACMode.OFF

    @property
    def hvac_action(self) -> HVACAction | None:
        """Return the current running HVAC action."""
        if self._ac["state"] == ADVANTAGE_AIR_STATE_OFF:
            return HVACAction.OFF
        if self._ac["mode"] == "myauto":
            return HVAC_ACTIONS.get(
                self._ac.get(ADVANTAGE_AIR_MYAUTO_MODE_SET, HVACAction.OFF)
            )
        return HVAC_ACTIONS.get(self._ac["mode"])

    @property
    def fan_mode(self) -> str | None:
        """Return the current fan modes."""
@ -273,6 +294,22 @@ class AdvantageAirZone(AdvantageAirZoneEntity, ClimateEntity):
            return HVACMode.HEAT_COOL
        return HVACMode.OFF

    @property
    def hvac_action(self) -> HVACAction | None:
        """Return the HVAC action, inheriting from master AC if zone is open but idle if air is <= 5%."""
        if self._ac["state"] == ADVANTAGE_AIR_STATE_OFF:
            return HVACAction.OFF
        master_action = HVAC_ACTIONS.get(self._ac["mode"], HVACAction.OFF)
        if self._ac["mode"] == "myauto":
            master_action = HVAC_ACTIONS.get(
                str(self._ac.get(ADVANTAGE_AIR_MYAUTO_MODE_SET)), HVACAction.OFF
            )
        if self._zone["state"] == ADVANTAGE_AIR_STATE_OPEN:
            if self._zone["value"] <= Decimal(5):
                return HVACAction.IDLE
            return master_action
        return HVACAction.OFF

    @property
    def current_temperature(self) -> float | None:
        """Return the current temperature."""
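The zone hvac_action logic above is the subtle part of this hunk: a zone inherits the master AC's action, except that a damper open at 5% or less reports idle. A minimal standalone sketch of that decision flow, with plain strings and dicts standing in for the Home Assistant enums and coordinator data (illustrative names, not the integration's API):

from decimal import Decimal

# Illustrative stand-ins for the integration's constants (values assumed for the sketch).
HVAC_ACTIONS = {"cool": "cooling", "heat": "heating", "vent": "fan", "dry": "drying"}
MYAUTO_SET_MODE = "myAutoModeCurrentSetMode"


def zone_action(ac: dict, zone: dict) -> str:
    """Mirror the AdvantageAirZone.hvac_action flow from the hunk above."""
    if ac["state"] == "off":
        return "off"
    master = HVAC_ACTIONS.get(ac["mode"], "off")
    if ac["mode"] == "myauto":
        # MyAuto reports the mode it currently settled on, not "myauto" itself.
        master = HVAC_ACTIONS.get(str(ac.get(MYAUTO_SET_MODE)), "off")
    if zone["state"] == "open":
        # An open damper at 5% or less counts as idle rather than the master action.
        return "idle" if zone["value"] <= Decimal(5) else master
    return "off"


ac = {"state": "on", "mode": "myauto", MYAUTO_SET_MODE: "heat"}
print(zone_action(ac, {"state": "open", "value": Decimal(40)}))  # heating
print(zone_action(ac, {"state": "open", "value": Decimal(5)}))   # idle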
@ -7,3 +7,4 @@ ADVANTAGE_AIR_STATE_CLOSE = "close"
ADVANTAGE_AIR_STATE_ON = "on"
ADVANTAGE_AIR_STATE_OFF = "off"
ADVANTAGE_AIR_AUTOFAN_ENABLED = "aaAutoFanModeEnabled"
ADVANTAGE_AIR_NIGHT_MODE_ENABLED = "quietNightModeEnabled"
@ -41,7 +41,7 @@ async def async_setup_entry(
            entities.append(
                AdvantageAirThingCover(instance, thing, CoverDeviceClass.BLIND)
            )
        elif thing["channelDipState"] == 3:  # 3 = "Garage door"
        elif thing["channelDipState"] in [3, 10]:  # 3 & 10 = "Garage door"
            entities.append(
                AdvantageAirThingCover(instance, thing, CoverDeviceClass.GARAGE)
            )
@ -9,6 +9,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import AdvantageAirDataConfigEntry
from .const import (
    ADVANTAGE_AIR_AUTOFAN_ENABLED,
    ADVANTAGE_AIR_NIGHT_MODE_ENABLED,
    ADVANTAGE_AIR_STATE_OFF,
    ADVANTAGE_AIR_STATE_ON,
)
@ -32,6 +33,8 @@ async def async_setup_entry(
        entities.append(AdvantageAirFreshAir(instance, ac_key))
        if ADVANTAGE_AIR_AUTOFAN_ENABLED in ac_device["info"]:
            entities.append(AdvantageAirMyFan(instance, ac_key))
        if ADVANTAGE_AIR_NIGHT_MODE_ENABLED in ac_device["info"]:
            entities.append(AdvantageAirNightMode(instance, ac_key))
    if things := instance.coordinator.data.get("myThings"):
        entities.extend(
            AdvantageAirRelay(instance, thing)
@ -93,6 +96,32 @@ class AdvantageAirMyFan(AdvantageAirAcEntity, SwitchEntity):
        await self.async_update_ac({ADVANTAGE_AIR_AUTOFAN_ENABLED: False})


class AdvantageAirNightMode(AdvantageAirAcEntity, SwitchEntity):
    """Representation of Advantage 'MySleep$aver' Mode control."""

    _attr_icon = "mdi:weather-night"
    _attr_name = "MySleep$aver"
    _attr_device_class = SwitchDeviceClass.SWITCH

    def __init__(self, instance: AdvantageAirData, ac_key: str) -> None:
        """Initialize an Advantage Air Night Mode control."""
        super().__init__(instance, ac_key)
        self._attr_unique_id += "-nightmode"

    @property
    def is_on(self) -> bool:
        """Return the Night Mode status."""
        return self._ac[ADVANTAGE_AIR_NIGHT_MODE_ENABLED]

    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn Night Mode on."""
        await self.async_update_ac({ADVANTAGE_AIR_NIGHT_MODE_ENABLED: True})

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn Night Mode off."""
        await self.async_update_ac({ADVANTAGE_AIR_NIGHT_MODE_ENABLED: False})


class AdvantageAirRelay(AdvantageAirThingEntity, SwitchEntity):
    """Representation of Advantage Air Thing."""
@ -51,7 +51,7 @@
  "issues": {
    "deprecated_yaml_import_issue_cannot_connect": {
      "title": "The {integration_title} YAML configuration import failed",
      "description": "Configuring {integration_title} using YAML is being removed but there was an connection error importing your YAML configuration.\n\nEnsure connection to {integration_title} works and restart Home Assistant to try again or remove the {integration_title} YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually."
      "description": "Configuring {integration_title} using YAML is being removed but there was a connection error importing your YAML configuration.\n\nEnsure connection to {integration_title} works and restart Home Assistant to try again or remove the {integration_title} YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually."
    }
  }
}
@ -11,7 +11,7 @@
      }
    },
    "discovery_confirm": {
      "description": "Do you want to setup {model}?"
      "description": "Do you want to set up {model}?"
    }
  },
  "abort": {
@ -105,7 +105,14 @@ class AirlyDataUpdateCoordinator(DataUpdateCoordinator[dict[str, str | float | i
        try:
            await measurements.update()
        except (AirlyError, ClientConnectorError) as error:
            raise UpdateFailed(error) from error
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="update_error",
                translation_placeholders={
                    "entry": self.config_entry.title,
                    "error": repr(error),
                },
            ) from error

        _LOGGER.debug(
            "Requests remaining: %s/%s",
@ -126,7 +133,11 @@ class AirlyDataUpdateCoordinator(DataUpdateCoordinator[dict[str, str | float | i
        standards = measurements.current["standards"]

        if index["description"] == NO_AIRLY_SENSORS:
            raise UpdateFailed("Can't retrieve data: no Airly sensors in this area")
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="no_station",
                translation_placeholders={"entry": self.config_entry.title},
            )
        for value in values:
            data[value["name"]] = value["value"]
        for standard in standards:
@ -36,5 +36,13 @@
        "name": "[%key:component::sensor::entity_component::carbon_monoxide::name%]"
      }
    }
  }
  },
  "exceptions": {
    "update_error": {
      "message": "An error occurred while retrieving data from the Airly API for {entry}: {error}"
    },
    "no_station": {
      "message": "An error occurred while retrieving data from the Airly API for {entry}: no measuring stations in this area"
    }
  }
}
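A rough sketch of how the coordinator's new translation keys line up with the messages added in this hunk: Home Assistant substitutes the translation_placeholders into the strings.json entry. Plain str.format stands in for the real translation machinery here, as an approximation only:

# The "update_error" message as added to strings.json above.
UPDATE_ERROR = "An error occurred while retrieving data from the Airly API for {entry}: {error}"

# Shape of the translation_placeholders passed to UpdateFailed in the coordinator hunk.
placeholders = {"entry": "Home", "error": "ClientConnectorError('Cannot connect')"}
print(UPDATE_ERROR.format(**placeholders))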
@ -8,7 +8,7 @@ from aiohttp import ClientSession
from aiohttp.client_exceptions import ClientConnectorError
from pyairnow import WebServiceAPI
from pyairnow.conv import aqi_to_concentration
from pyairnow.errors import AirNowError
from pyairnow.errors import AirNowError, InvalidJsonError

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
@ -79,7 +79,7 @@ class AirNowDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
                distance=self.distance,
            )

        except (AirNowError, ClientConnectorError) as error:
        except (AirNowError, ClientConnectorError, InvalidJsonError) as error:
            raise UpdateFailed(error) from error

        if not obs:
@ -7,7 +7,7 @@
        "api_key": "[%key:common::config_flow::data::api_key%]",
        "latitude": "[%key:common::config_flow::data::latitude%]",
        "longitude": "[%key:common::config_flow::data::longitude%]",
        "radius": "Station Radius (miles; optional)"
        "radius": "Station radius (miles; optional)"
      }
    }
  },
@ -25,7 +25,7 @@
    "step": {
      "init": {
        "data": {
          "radius": "Station Radius (miles)"
          "radius": "Station radius (miles)"
        }
      }
    }
@ -91,7 +91,7 @@
      "name": "Hydrogen fluoride"
    },
    "health_index": {
      "name": "Health Index"
      "name": "Health index"
    },
    "absolute_humidity": {
      "name": "Absolute humidity"
@ -112,10 +112,10 @@
      "name": "Oxygen"
    },
    "performance_index": {
      "name": "Performance Index"
      "name": "Performance index"
    },
    "hydrogen_phosphide": {
      "name": "Hydrogen Phosphide"
      "name": "Hydrogen phosphide"
    },
    "relative_pressure": {
      "name": "Relative pressure"
@ -127,22 +127,22 @@
      "name": "Refrigerant"
    },
    "silicon_hydride": {
      "name": "Silicon Hydride"
      "name": "Silicon hydride"
    },
    "noise": {
      "name": "Noise"
    },
    "maximum_noise": {
      "name": "Noise (Maximum)"
      "name": "Noise (maximum)"
    },
    "radon": {
      "name": "Radon"
    },
    "industrial_volatile_organic_compounds": {
      "name": "VOCs (Industrial)"
      "name": "VOCs (industrial)"
    },
    "virus_index": {
      "name": "Virus Index"
      "name": "Virus index"
    }
  }
}
@ -102,7 +102,8 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
            device = await self._get_device_data(discovery_info)
        except AirthingsDeviceUpdateError:
            return self.async_abort(reason="cannot_connect")
        except Exception:  # noqa: BLE001
        except Exception:
            _LOGGER.exception("Unknown error occurred")
            return self.async_abort(reason="unknown")

        name = get_name(device)
@ -160,7 +161,8 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
            device = await self._get_device_data(discovery_info)
        except AirthingsDeviceUpdateError:
            return self.async_abort(reason="cannot_connect")
        except Exception:  # noqa: BLE001
        except Exception:
            _LOGGER.exception("Unknown error occurred")
            return self.async_abort(reason="unknown")
        name = get_name(device)
        self._discovered_devices[address] = Discovery(name, discovery_info, device)
@ -32,7 +32,8 @@ class AirTouch5ConfigFlow(ConfigFlow, domain=DOMAIN):
            client = Airtouch5SimpleClient(user_input[CONF_HOST])
            try:
                await client.test_connection()
            except Exception:  # noqa: BLE001
            except Exception:
                _LOGGER.exception("Unexpected exception")
                errors = {"base": "cannot_connect"}
            else:
                await self.async_set_unique_id(user_input[CONF_HOST])
@ -2,7 +2,7 @@
  "config": {
    "step": {
      "geography_by_coords": {
        "title": "Configure a Geography",
        "title": "Configure a geography",
        "description": "Use the AirVisual cloud API to monitor a latitude/longitude.",
        "data": {
          "api_key": "[%key:common::config_flow::data::api_key%]",
@ -56,12 +56,12 @@
    "sensor": {
      "pollutant_label": {
        "state": {
          "co": "Carbon Monoxide",
          "co": "Carbon monoxide",
          "n2": "Nitrogen Dioxide",
          "n2": "Nitrogen dioxide",
          "o3": "Ozone",
          "p1": "PM10",
          "p2": "PM2.5",
          "s2": "Sulfur Dioxide"
          "s2": "Sulfur dioxide"
        }
      },
      "pollutant_level": {
@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/airzone_cloud",
  "iot_class": "cloud_push",
  "loggers": ["aioairzone_cloud"],
  "requirements": ["aioairzone-cloud==0.6.10"]
  "requirements": ["aioairzone-cloud==0.6.11"]
}
@ -1438,7 +1438,7 @@ class AlexaModeController(AlexaCapability):
        # Fan preset_mode
        if self.instance == f"{fan.DOMAIN}.{fan.ATTR_PRESET_MODE}":
            mode = self.entity.attributes.get(fan.ATTR_PRESET_MODE, None)
            if mode in self.entity.attributes.get(fan.ATTR_PRESET_MODES, None):
            if mode in self.entity.attributes.get(fan.ATTR_PRESET_MODES, ()):
                return f"{fan.ATTR_PRESET_MODE}.{mode}"

        # Humidifier mode
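The small-looking change above (an empty tuple instead of None as the attribute fallback) is a crash fix: a membership test against None raises. A two-line illustration:

mode = "eco"
# "eco" in None would raise TypeError: argument of type 'NoneType' is not iterable
print(mode in ())  # False: safe when the entity exposes no preset modes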
@ -240,6 +240,7 @@ SENSOR_DESCRIPTIONS = (
        suggested_display_precision=0,
        entity_registry_enabled_default=False,
        device_class=SensorDeviceClass.WIND_DIRECTION,
        state_class=SensorStateClass.MEASUREMENT_ANGLE,
    ),
    SensorEntityDescription(
        key=TYPE_WINDGUSTMPH,
@ -609,6 +609,7 @@ SENSOR_DESCRIPTIONS = (
        translation_key="wind_direction",
        native_unit_of_measurement=DEGREE,
        device_class=SensorDeviceClass.WIND_DIRECTION,
        state_class=SensorStateClass.MEASUREMENT_ANGLE,
    ),
    SensorEntityDescription(
        key=TYPE_WINDDIR_AVG10M,
@ -8,7 +8,7 @@ from python_homeassistant_analytics import (
    HomeassistantAnalyticsClient,
    HomeassistantAnalyticsConnectionError,
)
from python_homeassistant_analytics.models import IntegrationType
from python_homeassistant_analytics.models import Environment, IntegrationType
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow
@ -81,7 +81,7 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN):
            )
            try:
                addons = await client.get_addons()
                integrations = await client.get_integrations()
                integrations = await client.get_integrations(Environment.NEXT)
                custom_integrations = await client.get_custom_integrations()
            except HomeassistantAnalyticsConnectionError:
                LOGGER.exception("Error connecting to Home Assistant analytics")
@ -165,7 +165,7 @@ class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlow):
            )
            try:
                addons = await client.get_addons()
                integrations = await client.get_integrations()
                integrations = await client.get_integrations(Environment.NEXT)
                custom_integrations = await client.get_custom_integrations()
            except HomeassistantAnalyticsConnectionError:
                LOGGER.exception("Error connecting to Home Assistant analytics")
@ -7,6 +7,6 @@
  "integration_type": "device",
  "iot_class": "local_push",
  "loggers": ["androidtvremote2"],
  "requirements": ["androidtvremote2==0.2.0"],
  "requirements": ["androidtvremote2==0.2.1"],
  "zeroconf": ["_androidtvremote2._tcp.local."]
}
@ -2,6 +2,8 @@

from __future__ import annotations

import logging

from anova_wifi import AnovaApi, InvalidLogin
import voluptuous as vol

@ -11,8 +13,10 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)


class AnovaConfligFlow(ConfigFlow, domain=DOMAIN):
class AnovaConfigFlow(ConfigFlow, domain=DOMAIN):
    """Sets up a config flow for Anova."""

    VERSION = 1
@ -35,7 +39,8 @@ class AnovaConfligFlow(ConfigFlow, domain=DOMAIN):
            await api.authenticate()
        except InvalidLogin:
            errors["base"] = "invalid_auth"
        except Exception:  # noqa: BLE001
        except Exception:
            _LOGGER.exception("Unexpected exception")
            errors["base"] = "unknown"
        else:
            return self.async_create_entry(
@ -22,6 +22,7 @@ from . import AnthemavConfigEntry
from .const import ANTHEMAV_UPDATE_SIGNAL, DOMAIN, MANUFACTURER

_LOGGER = logging.getLogger(__name__)
VOLUME_STEP = 0.01


async def async_setup_entry(
@ -60,6 +61,7 @@ class AnthemAVR(MediaPlayerEntity):
        | MediaPlayerEntityFeature.TURN_OFF
        | MediaPlayerEntityFeature.SELECT_SOURCE
    )
    _attr_volume_step = VOLUME_STEP

    def __init__(
        self,
@ -12,7 +12,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv

from .const import DOMAIN, LOGGER
from .const import CONF_CHAT_MODEL, DOMAIN, LOGGER, RECOMMENDED_CHAT_MODEL

PLATFORMS = (Platform.CONVERSATION,)
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
@ -26,12 +26,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) ->
        partial(anthropic.AsyncAnthropic, api_key=entry.data[CONF_API_KEY])
    )
    try:
        await client.messages.create(
            model="claude-3-haiku-20240307",
            max_tokens=1,
            messages=[{"role": "user", "content": "Hi"}],
            timeout=10.0,
        )
        model_id = entry.options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
        model = await client.models.retrieve(model_id=model_id, timeout=10.0)
        LOGGER.debug("Anthropic model: %s", model.display_name)
    except anthropic.AuthenticationError as err:
        LOGGER.error("Invalid API key: %s", err)
        return False
@ -34,10 +34,12 @@ from .const import (
    CONF_PROMPT,
    CONF_RECOMMENDED,
    CONF_TEMPERATURE,
    CONF_THINKING_BUDGET,
    DOMAIN,
    RECOMMENDED_CHAT_MODEL,
    RECOMMENDED_MAX_TOKENS,
    RECOMMENDED_TEMPERATURE,
    RECOMMENDED_THINKING_BUDGET,
)

_LOGGER = logging.getLogger(__name__)
@ -63,12 +65,7 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
    client = await hass.async_add_executor_job(
        partial(anthropic.AsyncAnthropic, api_key=data[CONF_API_KEY])
    )
    await client.messages.create(
        model="claude-3-haiku-20240307",
        max_tokens=1,
        messages=[{"role": "user", "content": "Hi"}],
        timeout=10.0,
    )
    await client.models.list(timeout=10.0)


class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
@ -133,21 +130,29 @@ class AnthropicOptionsFlow(OptionsFlow):
    ) -> ConfigFlowResult:
        """Manage the options."""
        options: dict[str, Any] | MappingProxyType[str, Any] = self.config_entry.options
        errors: dict[str, str] = {}

        if user_input is not None:
            if user_input[CONF_RECOMMENDED] == self.last_rendered_recommended:
                if user_input[CONF_LLM_HASS_API] == "none":
                    user_input.pop(CONF_LLM_HASS_API)
                return self.async_create_entry(title="", data=user_input)

            # Re-render the options again, now with the recommended options shown/hidden
            self.last_rendered_recommended = user_input[CONF_RECOMMENDED]

            options = {
                CONF_RECOMMENDED: user_input[CONF_RECOMMENDED],
                CONF_PROMPT: user_input[CONF_PROMPT],
                CONF_LLM_HASS_API: user_input[CONF_LLM_HASS_API],
            }

                if user_input.get(
                    CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET
                ) >= user_input.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS):
                    errors[CONF_THINKING_BUDGET] = "thinking_budget_too_large"

                if not errors:
                    return self.async_create_entry(title="", data=user_input)
            else:
                # Re-render the options again, now with the recommended options shown/hidden
                self.last_rendered_recommended = user_input[CONF_RECOMMENDED]

                options = {
                    CONF_RECOMMENDED: user_input[CONF_RECOMMENDED],
                    CONF_PROMPT: user_input[CONF_PROMPT],
                    CONF_LLM_HASS_API: user_input[CONF_LLM_HASS_API],
                }

        suggested_values = options.copy()
        if not suggested_values.get(CONF_PROMPT):
@ -161,6 +166,7 @@ class AnthropicOptionsFlow(OptionsFlow):
        return self.async_show_form(
            step_id="init",
            data_schema=schema,
            errors=errors or None,
        )


@ -210,6 +216,10 @@ def anthropic_config_option_schema(
                CONF_TEMPERATURE,
                default=RECOMMENDED_TEMPERATURE,
            ): NumberSelector(NumberSelectorConfig(min=0, max=1, step=0.05)),
            vol.Optional(
                CONF_THINKING_BUDGET,
                default=RECOMMENDED_THINKING_BUDGET,
            ): int,
        }
    )
    return schema
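A condensed sketch of the new options-flow check: the thinking budget must stay below the maximum token count, otherwise the form re-renders with a thinking_budget_too_large error. Constant names follow the hunks above; the plain dict stands in for user_input:

CONF_MAX_TOKENS = "max_tokens"
CONF_THINKING_BUDGET = "thinking_budget"
RECOMMENDED_MAX_TOKENS = 1024
RECOMMENDED_THINKING_BUDGET = 0


def validate_options(user_input: dict) -> dict[str, str]:
    """Return form errors, mirroring the check in the options flow above."""
    errors: dict[str, str] = {}
    if user_input.get(CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET) >= user_input.get(
        CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS
    ):
        errors[CONF_THINKING_BUDGET] = "thinking_budget_too_large"
    return errors


print(validate_options({"max_tokens": 2048, "thinking_budget": 4096}))  # error case
print(validate_options({"max_tokens": 2048, "thinking_budget": 1024}))  # no errors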
@ -13,3 +13,8 @@ CONF_MAX_TOKENS = "max_tokens"
RECOMMENDED_MAX_TOKENS = 1024
CONF_TEMPERATURE = "temperature"
RECOMMENDED_TEMPERATURE = 1.0
CONF_THINKING_BUDGET = "thinking_budget"
RECOMMENDED_THINKING_BUDGET = 0
MIN_THINKING_BUDGET = 1024

THINKING_MODELS = ["claude-3-7-sonnet-20250219", "claude-3-7-sonnet-latest"]
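How these constants combine, per the conversation.py hunk further down: extended thinking is only requested when the model is in THINKING_MODELS and the configured budget reaches MIN_THINKING_BUDGET; otherwise thinking is explicitly disabled. A standalone sketch using plain dicts in place of the SDK's typed params:

THINKING_MODELS = ["claude-3-7-sonnet-20250219", "claude-3-7-sonnet-latest"]
MIN_THINKING_BUDGET = 1024


def thinking_param(model: str, budget: int) -> dict:
    """Build the 'thinking' argument shape the integration passes to the Anthropic SDK."""
    if model in THINKING_MODELS and budget >= MIN_THINKING_BUDGET:
        return {"type": "enabled", "budget_tokens": budget}
    return {"type": "disabled"}


print(thinking_param("claude-3-7-sonnet-latest", 2048))  # enabled
print(thinking_param("claude-3-haiku-20240307", 2048))   # disabled: unsupported model
print(thinking_param("claude-3-7-sonnet-latest", 512))   # disabled: budget below minimum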
@ -1,23 +1,32 @@
"""Conversation support for Anthropic."""

from collections.abc import AsyncGenerator, Callable
from collections.abc import AsyncGenerator, Callable, Iterable
import json
from typing import Any, Literal
from typing import Any, Literal, cast

import anthropic
from anthropic import AsyncStream
from anthropic._types import NOT_GIVEN
from anthropic.types import (
    InputJSONDelta,
    Message,
    MessageParam,
    MessageStreamEvent,
    RawContentBlockDeltaEvent,
    RawContentBlockStartEvent,
    RawContentBlockStopEvent,
    RawMessageStartEvent,
    RawMessageStopEvent,
    RedactedThinkingBlock,
    RedactedThinkingBlockParam,
    SignatureDelta,
    TextBlock,
    TextBlockParam,
    TextDelta,
    ThinkingBlock,
    ThinkingBlockParam,
    ThinkingConfigDisabledParam,
    ThinkingConfigEnabledParam,
    ThinkingDelta,
    ToolParam,
    ToolResultBlockParam,
    ToolUseBlock,
@ -30,7 +39,7 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import chat_session, device_registry as dr, intent, llm
from homeassistant.helpers import device_registry as dr, intent, llm
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import AnthropicConfigEntry
@ -39,11 +48,15 @@ from .const import (
    CONF_MAX_TOKENS,
    CONF_PROMPT,
    CONF_TEMPERATURE,
    CONF_THINKING_BUDGET,
    DOMAIN,
    LOGGER,
    MIN_THINKING_BUDGET,
    RECOMMENDED_CHAT_MODEL,
    RECOMMENDED_MAX_TOKENS,
    RECOMMENDED_TEMPERATURE,
    RECOMMENDED_THINKING_BUDGET,
    THINKING_MODELS,
)

# Max number of back and forth with the LLM to generate a response
@ -71,73 +84,101 @@ def _format_tool(
    )


def _message_convert(
    message: Message,
) -> MessageParam:
    """Convert from class to TypedDict."""
    param_content: list[TextBlockParam | ToolUseBlockParam] = []

    for message_content in message.content:
        if isinstance(message_content, TextBlock):
            param_content.append(TextBlockParam(type="text", text=message_content.text))
        elif isinstance(message_content, ToolUseBlock):
            param_content.append(
                ToolUseBlockParam(
                    type="tool_use",
                    id=message_content.id,
                    name=message_content.name,
                    input=message_content.input,
                )
            )

    return MessageParam(role=message.role, content=param_content)


def _convert_content(chat_content: conversation.Content) -> MessageParam:
    """Create tool response content."""
    if isinstance(chat_content, conversation.ToolResultContent):
        return MessageParam(
            role="user",
            content=[
                ToolResultBlockParam(
                    type="tool_result",
                    tool_use_id=chat_content.tool_call_id,
                    content=json.dumps(chat_content.tool_result),
                )
            ],
        )
    if isinstance(chat_content, conversation.AssistantContent):
        return MessageParam(
            role="assistant",
            content=[
                TextBlockParam(type="text", text=chat_content.content or ""),
                *[
                    ToolUseBlockParam(
                        type="tool_use",
                        id=tool_call.id,
                        name=tool_call.tool_name,
                        input=tool_call.tool_args,
                    )
                    for tool_call in chat_content.tool_calls or ()
                ],
            ],
        )
    if isinstance(chat_content, conversation.UserContent):
        return MessageParam(
            role="user",
            content=chat_content.content,
        )
    # Note: We don't pass SystemContent here as its passed to the API as the prompt
    raise ValueError(f"Unexpected content type: {type(chat_content)}")


def _convert_content(
    chat_content: Iterable[conversation.Content],
) -> list[MessageParam]:
    """Transform HA chat_log content into Anthropic API format."""
    messages: list[MessageParam] = []

    for content in chat_content:
        if isinstance(content, conversation.ToolResultContent):
            tool_result_block = ToolResultBlockParam(
                type="tool_result",
                tool_use_id=content.tool_call_id,
                content=json.dumps(content.tool_result),
            )
            if not messages or messages[-1]["role"] != "user":
                messages.append(
                    MessageParam(
                        role="user",
                        content=[tool_result_block],
                    )
                )
            elif isinstance(messages[-1]["content"], str):
                messages[-1]["content"] = [
                    TextBlockParam(type="text", text=messages[-1]["content"]),
                    tool_result_block,
                ]
            else:
                messages[-1]["content"].append(tool_result_block)  # type: ignore[attr-defined]
        elif isinstance(content, conversation.UserContent):
            # Combine consequent user messages
            if not messages or messages[-1]["role"] != "user":
                messages.append(
                    MessageParam(
                        role="user",
                        content=content.content,
                    )
                )
            elif isinstance(messages[-1]["content"], str):
                messages[-1]["content"] = [
                    TextBlockParam(type="text", text=messages[-1]["content"]),
                    TextBlockParam(type="text", text=content.content),
                ]
            else:
                messages[-1]["content"].append(  # type: ignore[attr-defined]
                    TextBlockParam(type="text", text=content.content)
                )
        elif isinstance(content, conversation.AssistantContent):
            # Combine consequent assistant messages
            if not messages or messages[-1]["role"] != "assistant":
                messages.append(
                    MessageParam(
                        role="assistant",
                        content=[],
                    )
                )

            if content.content:
                messages[-1]["content"].append(  # type: ignore[union-attr]
                    TextBlockParam(type="text", text=content.content)
                )
            if content.tool_calls:
                messages[-1]["content"].extend(  # type: ignore[union-attr]
                    [
                        ToolUseBlockParam(
                            type="tool_use",
                            id=tool_call.id,
                            name=tool_call.tool_name,
                            input=tool_call.tool_args,
                        )
                        for tool_call in content.tool_calls
                    ]
                )
        else:
            # Note: We don't pass SystemContent here as its passed to the API as the prompt
            raise TypeError(f"Unexpected content type: {type(content)}")

    return messages
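The rewritten _convert_content returns a whole message list rather than one message per content item, merging consecutive entries with the same role (the "Combine consequent ... messages" comments above). A toy illustration of the merging rule with plain dicts instead of the HA content classes:

def merge(messages: list[dict], role: str, block: object) -> None:
    """Append a content block, starting a new message only when the role changes."""
    if not messages or messages[-1]["role"] != role:
        messages.append({"role": role, "content": []})
    messages[-1]["content"].append(block)


msgs: list[dict] = []
merge(msgs, "user", {"type": "text", "text": "turn on the lights"})
merge(msgs, "user", {"type": "tool_result", "tool_use_id": "t1", "content": "done"})
merge(msgs, "assistant", {"type": "text", "text": "Lights are on."})
print(len(msgs))  # 2: both user blocks merged into a single user message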
async def _transform_stream(
    result: AsyncStream[MessageStreamEvent],
    messages: list[MessageParam],
) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
    """Transform the response stream into HA format.

    A typical stream of responses might look something like the following:
    - RawMessageStartEvent with no content
    - RawContentBlockStartEvent with an empty ThinkingBlock (if extended thinking is enabled)
    - RawContentBlockDeltaEvent with a ThinkingDelta
    - RawContentBlockDeltaEvent with a ThinkingDelta
    - RawContentBlockDeltaEvent with a ThinkingDelta
    - ...
    - RawContentBlockDeltaEvent with a SignatureDelta
    - RawContentBlockStopEvent
    - RawContentBlockStartEvent with a RedactedThinkingBlock (occasionally)
    - RawContentBlockStopEvent (RedactedThinkingBlock does not have a delta)
    - RawContentBlockStartEvent with an empty TextBlock
    - RawContentBlockDeltaEvent with a TextDelta
    - RawContentBlockDeltaEvent with a TextDelta
@ -151,44 +192,103 @@ async def _transform_stream(
    - RawContentBlockStopEvent
    - RawMessageDeltaEvent with a stop_reason='tool_use'
    - RawMessageStopEvent(type='message_stop')

    Each message could contain multiple blocks of the same type.
    """
    if result is None:
        raise TypeError("Expected a stream of messages")

    current_tool_call: dict | None = None
    current_message: MessageParam | None = None
    current_block: (
        TextBlockParam
        | ToolUseBlockParam
        | ThinkingBlockParam
        | RedactedThinkingBlockParam
        | None
    ) = None
    current_tool_args: str

    async for response in result:
        LOGGER.debug("Received response: %s", response)

        if isinstance(response, RawContentBlockStartEvent):
        if isinstance(response, RawMessageStartEvent):
            if response.message.role != "assistant":
                raise ValueError("Unexpected message role")
            current_message = MessageParam(role=response.message.role, content=[])
        elif isinstance(response, RawContentBlockStartEvent):
            if isinstance(response.content_block, ToolUseBlock):
                current_tool_call = {
                    "id": response.content_block.id,
                    "name": response.content_block.name,
                    "input": "",
                }
                current_block = ToolUseBlockParam(
                    type="tool_use",
                    id=response.content_block.id,
                    name=response.content_block.name,
                    input="",
                )
                current_tool_args = ""
            elif isinstance(response.content_block, TextBlock):
                current_block = TextBlockParam(
                    type="text", text=response.content_block.text
                )
                yield {"role": "assistant"}
                if response.content_block.text:
                    yield {"content": response.content_block.text}
            elif isinstance(response.content_block, ThinkingBlock):
                current_block = ThinkingBlockParam(
                    type="thinking",
                    thinking=response.content_block.thinking,
                    signature=response.content_block.signature,
                )
            elif isinstance(response.content_block, RedactedThinkingBlock):
                current_block = RedactedThinkingBlockParam(
                    type="redacted_thinking", data=response.content_block.data
                )
                LOGGER.debug(
                    "Some of Claude’s internal reasoning has been automatically "
                    "encrypted for safety reasons. This doesn’t affect the quality of "
                    "responses"
                )
        elif isinstance(response, RawContentBlockDeltaEvent):
            if current_block is None:
                raise ValueError("Unexpected delta without a block")
            if isinstance(response.delta, InputJSONDelta):
                if current_tool_call is None:
                    raise ValueError("Unexpected delta without a tool call")
                current_tool_call["input"] += response.delta.partial_json
                current_tool_args += response.delta.partial_json
            elif isinstance(response.delta, TextDelta):
                LOGGER.debug("yielding delta: %s", response.delta.text)
                text_block = cast(TextBlockParam, current_block)
                text_block["text"] += response.delta.text
                yield {"content": response.delta.text}
            elif isinstance(response.delta, ThinkingDelta):
                thinking_block = cast(ThinkingBlockParam, current_block)
                thinking_block["thinking"] += response.delta.thinking
            elif isinstance(response.delta, SignatureDelta):
                thinking_block = cast(ThinkingBlockParam, current_block)
                thinking_block["signature"] += response.delta.signature
        elif isinstance(response, RawContentBlockStopEvent):
            if current_tool_call:
            if current_block is None:
                raise ValueError("Unexpected stop event without a current block")
            if current_block["type"] == "tool_use":
                tool_block = cast(ToolUseBlockParam, current_block)
                tool_args = json.loads(current_tool_args)
                tool_block["input"] = tool_args
                yield {
                    "tool_calls": [
                        llm.ToolInput(
                            id=current_tool_call["id"],
                            id=tool_block["id"],
                            tool_name=current_tool_call["name"],
                            tool_name=tool_block["name"],
                            tool_args=json.loads(current_tool_call["input"]),
                            tool_args=tool_args,
                        )
                    ]
                }
            current_tool_call = None
            elif current_block["type"] == "thinking":
                thinking_block = cast(ThinkingBlockParam, current_block)
                LOGGER.debug("Thinking: %s", thinking_block["thinking"])

            if current_message is None:
                raise ValueError("Unexpected stop event without a current message")
            current_message["content"].append(current_block)  # type: ignore[union-attr]
            current_block = None
        elif isinstance(response, RawMessageStopEvent):
            if current_message is not None:
                messages.append(current_message)
                current_message = None


class AnthropicConversationEntity(
@ -226,18 +326,6 @@ class AnthropicConversationEntity(
            self.entry.add_update_listener(self._async_entry_update_listener)
        )

    async def async_process(
        self, user_input: conversation.ConversationInput
    ) -> conversation.ConversationResult:
        """Process a sentence."""
        with (
            chat_session.async_get_chat_session(
                self.hass, user_input.conversation_id
            ) as session,
            conversation.async_get_chat_log(self.hass, session, user_input) as chat_log,
        ):
            return await self._async_handle_message(user_input, chat_log)

    async def _async_handle_message(
        self,
        user_input: conversation.ConversationInput,
@ -266,34 +354,50 @@ class AnthropicConversationEntity(
        system = chat_log.content[0]
        if not isinstance(system, conversation.SystemContent):
            raise TypeError("First message must be a system message")
        messages = [_convert_content(content) for content in chat_log.content[1:]]
        messages = _convert_content(chat_log.content[1:])

        client = self.entry.runtime_data

        thinking_budget = options.get(CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET)
        model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)

        # To prevent infinite loops, we limit the number of iterations
        for _iteration in range(MAX_TOOL_ITERATIONS):
            try:
                stream = await client.messages.create(
                    model=options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL),
                    messages=messages,
                    tools=tools or NOT_GIVEN,
                    max_tokens=options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
                    system=system.content,
                    temperature=options.get(CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE),
                    stream=True,
                )
            model_args = {
                "model": model,
                "messages": messages,
                "tools": tools or NOT_GIVEN,
                "max_tokens": options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
                "system": system.content,
                "stream": True,
            }
            if model in THINKING_MODELS and thinking_budget >= MIN_THINKING_BUDGET:
                model_args["thinking"] = ThinkingConfigEnabledParam(
                    type="enabled", budget_tokens=thinking_budget
                )
            else:
                model_args["thinking"] = ThinkingConfigDisabledParam(type="disabled")
                model_args["temperature"] = options.get(
                    CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
                )

            try:
                stream = await client.messages.create(**model_args)
            except anthropic.AnthropicError as err:
                raise HomeAssistantError(
                    f"Sorry, I had a problem talking to Anthropic: {err}"
                ) from err

            messages.extend(
                [
                    _convert_content(content)
                    async for content in chat_log.async_add_delta_content_stream(
                        user_input.agent_id, _transform_stream(stream)
                    )
                ]
                _convert_content(
                    [
                        content
                        async for content in chat_log.async_add_delta_content_stream(
                            user_input.agent_id, _transform_stream(stream, messages)
                        )
                        if not isinstance(content, conversation.AssistantContent)
                    ]
                )
            )

            if not chat_log.unresponded_tool_results:
@ -305,7 +409,9 @@ class AnthropicConversationEntity(
        intent_response = intent.IntentResponse(language=user_input.language)
        intent_response.async_set_speech(response_content.content or "")
        return conversation.ConversationResult(
            response=intent_response, conversation_id=chat_log.conversation_id
            response=intent_response,
            conversation_id=chat_log.conversation_id,
            continue_conversation=chat_log.continue_conversation,
        )

    async def _async_entry_update_listener(
@ -23,12 +23,17 @@
          "max_tokens": "Maximum tokens to return in response",
          "temperature": "Temperature",
          "llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]",
          "recommended": "Recommended model settings"
          "recommended": "Recommended model settings",
          "thinking_budget_tokens": "Thinking budget"
        },
        "data_description": {
          "prompt": "Instruct how the LLM should respond. This can be a template."
          "prompt": "Instruct how the LLM should respond. This can be a template.",
          "thinking_budget_tokens": "The number of tokens the model can use to think about the response out of the total maximum number of tokens. Set to 1024 or greater to enable extended thinking."
        }
      }
    }
    },
    "error": {
      "thinking_budget_too_large": "Maximum tokens must be greater than the thinking budget."
    }
  }
}
@ -57,7 +57,7 @@
      "name": "Status date"
    },
    "dip_switch_settings": {
      "name": "Dip switch settings"
      "name": "DIP switch settings"
    },
    "low_battery_signal": {
      "name": "Low battery signal"
@ -7,5 +7,5 @@
  "integration_type": "device",
  "iot_class": "local_push",
  "loggers": ["pyaprilaire"],
  "requirements": ["pyaprilaire==0.7.7"]
  "requirements": ["pyaprilaire==0.8.1"]
}
@ -60,7 +60,7 @@ class AquaCellConfigFlow(ConfigFlow, domain=DOMAIN):
                errors["base"] = "cannot_connect"
            except AuthenticationFailed:
                errors["base"] = "invalid_auth"
            except Exception:  # pylint: disable=broad-except
            except Exception:
                _LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"
            else:
@ -6,7 +6,11 @@ import logging
from typing import Any

from homeassistant.components import mqtt
from homeassistant.components.sensor import SensorDeviceClass, SensorEntity
from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntity,
    SensorStateClass,
)
from homeassistant.const import DEGREE, UnitOfPrecipitationDepth, UnitOfTemperature
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback
@ -98,6 +102,7 @@ def discover_sensors(topic: str, payload: dict[str, Any]) -> list[ArwnSensor] |
                DEGREE,
                "mdi:compass",
                device_class=SensorDeviceClass.WIND_DIRECTION,
                state_class=SensorStateClass.MEASUREMENT_ANGLE,
            ),
        ]
    return None
@ -178,6 +183,7 @@ class ArwnSensor(SensorEntity):
        units: str,
        icon: str | None = None,
        device_class: SensorDeviceClass | None = None,
        state_class: SensorStateClass | None = None,
    ) -> None:
        """Initialize the sensor."""
        self.entity_id = _slug(name)
@ -188,6 +194,7 @@ class ArwnSensor(SensorEntity):
        self._attr_native_unit_of_measurement = units
        self._attr_icon = icon
        self._attr_device_class = device_class
        self._attr_state_class = state_class

    def set_event(self, event: dict[str, Any]) -> None:
        """Update the sensor with the most recent event."""
@ -117,7 +117,7 @@ async def async_pipeline_from_audio_stream(
    """
    with chat_session.async_get_chat_session(hass, conversation_id) as session:
        pipeline_input = PipelineInput(
            conversation_id=session.conversation_id,
            session=session,
            device_id=device_id,
            stt_metadata=stt_metadata,
            stt_stream=stt_stream,
@ -19,14 +19,7 @@ import wave
|
|||||||
import hass_nabucasa
|
import hass_nabucasa
|
||||||
import voluptuous as vol
|
import voluptuous as vol
|
||||||
|
|
||||||
from homeassistant.components import (
|
from homeassistant.components import conversation, stt, tts, wake_word, websocket_api
|
||||||
conversation,
|
|
||||||
media_source,
|
|
||||||
stt,
|
|
||||||
tts,
|
|
||||||
wake_word,
|
|
||||||
websocket_api,
|
|
||||||
)
|
|
||||||
from homeassistant.components.tts import (
|
from homeassistant.components.tts import (
|
||||||
generate_media_source_id as tts_generate_media_source_id,
|
generate_media_source_id as tts_generate_media_source_id,
|
||||||
)
|
)
|
||||||
@ -96,6 +89,9 @@ ENGINE_LANGUAGE_PAIRS = (
|
|||||||
)
|
)
|
||||||
|
|
||||||
KEY_ASSIST_PIPELINE: HassKey[PipelineData] = HassKey(DOMAIN)
|
KEY_ASSIST_PIPELINE: HassKey[PipelineData] = HassKey(DOMAIN)
|
||||||
|
KEY_PIPELINE_CONVERSATION_DATA: HassKey[dict[str, PipelineConversationData]] = HassKey(
|
||||||
|
"pipeline_conversation_data"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def validate_language(data: dict[str, Any]) -> Any:
|
def validate_language(data: dict[str, Any]) -> Any:
|
||||||
@@ -129,7 +125,7 @@ SAVE_DELAY = 10
 @callback
 def _async_local_fallback_intent_filter(result: RecognizeResult) -> bool:
     """Filter out intents that are not local fallback."""
-    return result.intent.name in (intent.INTENT_GET_STATE, intent.INTENT_NEVERMIND)
+    return result.intent.name in (intent.INTENT_GET_STATE,)


 @callback
@@ -566,8 +562,7 @@ class PipelineRun:

     id: str = field(default_factory=ulid_util.ulid_now)
     stt_provider: stt.SpeechToTextEntity | stt.Provider = field(init=False, repr=False)
-    tts_engine: str = field(init=False, repr=False)
-    tts_options: dict | None = field(init=False, default=None)
+    tts_stream: tts.ResultStream | None = field(init=False, default=None)
     wake_word_entity_id: str | None = field(init=False, default=None, repr=False)
     wake_word_entity: wake_word.WakeWordDetectionEntity = field(init=False, repr=False)

@@ -590,6 +585,12 @@ class PipelineRun:
     _device_id: str | None = None
     """Optional device id set during run start."""

+    _conversation_data: PipelineConversationData | None = None
+    """Data tied to the conversation ID."""
+
+    _intent_agent_only = False
+    """If request should only be handled by agent, ignoring sentence triggers and local processing."""
+
     def __post_init__(self) -> None:
         """Set language for pipeline."""
         self.language = self.pipeline.language or self.hass.config.language
@@ -639,13 +640,19 @@ class PipelineRun:
         self._device_id = device_id
         self._start_debug_recording_thread()

-        data = {
+        data: dict[str, Any] = {
             "pipeline": self.pipeline.id,
             "language": self.language,
             "conversation_id": conversation_id,
         }
         if self.runner_data is not None:
             data["runner_data"] = self.runner_data
+        if self.tts_stream:
+            data["tts_output"] = {
+                "token": self.tts_stream.token,
+                "url": self.tts_stream.url,
+                "mime_type": self.tts_stream.content_type,
+            }

         self.process_event(PipelineEvent(PipelineEventType.RUN_START, data))

@@ -1007,19 +1014,36 @@ class PipelineRun:

             yield chunk.audio

-    async def prepare_recognize_intent(self) -> None:
+    async def prepare_recognize_intent(self, session: chat_session.ChatSession) -> None:
         """Prepare recognizing an intent."""
-        agent_info = conversation.async_get_agent_info(
-            self.hass,
-            self.pipeline.conversation_engine or conversation.HOME_ASSISTANT_AGENT,
-        )
-
-        if agent_info is None:
-            engine = self.pipeline.conversation_engine or "default"
-            raise IntentRecognitionError(
-                code="intent-not-supported",
-                message=f"Intent recognition engine {engine} is not found",
-            )
+        self._conversation_data = async_get_pipeline_conversation_data(
+            self.hass, session
+        )
+
+        if self._conversation_data.continue_conversation_agent is not None:
+            agent_info = conversation.async_get_agent_info(
+                self.hass, self._conversation_data.continue_conversation_agent
+            )
+            self._conversation_data.continue_conversation_agent = None
+            if agent_info is None:
+                raise IntentRecognitionError(
+                    code="intent-agent-not-found",
+                    message=f"Intent recognition engine {self._conversation_data.continue_conversation_agent} asked for follow-up but is no longer found",
+                )
+            self._intent_agent_only = True
+
+        else:
+            agent_info = conversation.async_get_agent_info(
+                self.hass,
+                self.pipeline.conversation_engine or conversation.HOME_ASSISTANT_AGENT,
+            )
+
+            if agent_info is None:
+                engine = self.pipeline.conversation_engine or "default"
+                raise IntentRecognitionError(
+                    code="intent-not-supported",
+                    message=f"Intent recognition engine {engine} is not found",
+                )

         self.intent_agent = agent_info.id

@@ -1031,7 +1055,7 @@ class PipelineRun:
         conversation_extra_system_prompt: str | None,
     ) -> str:
         """Run intent recognition portion of pipeline. Returns text to speak."""
-        if self.intent_agent is None:
+        if self.intent_agent is None or self._conversation_data is None:
             raise RuntimeError("Recognize intent was not prepared")

         if self.pipeline.conversation_language == MATCH_ALL:
@@ -1078,7 +1102,7 @@ class PipelineRun:
         agent_id = self.intent_agent
         processed_locally = agent_id == conversation.HOME_ASSISTANT_AGENT
         intent_response: intent.IntentResponse | None = None
-        if not processed_locally:
+        if not processed_locally and not self._intent_agent_only:
             # Sentence triggers override conversation agent
             if (
                 trigger_response_text
@@ -1195,6 +1219,9 @@ class PipelineRun:
                 )
             )

+        if conversation_result.continue_conversation:
+            self._conversation_data.continue_conversation_agent = agent_id
+
         return speech

     async def prepare_text_to_speech(self) -> None:
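Note: the hunks above wire multi-turn conversations together. When an agent's result sets `continue_conversation`, its agent id is parked in the session's `PipelineConversationData`; the next run for the same chat session is routed straight back to that agent, with sentence triggers and local fallback skipped via `_intent_agent_only`. A minimal sketch of the agent side, assuming an `intent_response` and `user_input` built elsewhere (the keyword arguments are inferred from this diff, not quoted from it):

```python
from homeassistant.components import conversation

# Returned from a conversation agent's processing method; the pipeline's
# recognize_intent() above checks this flag and stores the agent id.
result = conversation.ConversationResult(
    response=intent_response,                    # an intent.IntentResponse
    conversation_id=user_input.conversation_id,
    continue_conversation=True,                  # ask for a follow-up turn
)
```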
@@ -1217,36 +1244,31 @@ class PipelineRun:
             tts_options[tts.ATTR_PREFERRED_SAMPLE_BYTES] = SAMPLE_WIDTH

         try:
-            options_supported = await tts.async_support_options(
-                self.hass,
-                engine,
-                self.pipeline.tts_language,
-                tts_options,
+            self.tts_stream = tts.async_create_stream(
+                hass=self.hass,
+                engine=engine,
+                language=self.pipeline.tts_language,
+                options=tts_options,
             )
         except HomeAssistantError as err:
-            raise TextToSpeechError(
-                code="tts-not-supported",
-                message=f"Text-to-speech engine '{engine}' not found",
-            ) from err
-        if not options_supported:
             raise TextToSpeechError(
                 code="tts-not-supported",
                 message=(
                     f"Text-to-speech engine {engine} "
-                    f"does not support language {self.pipeline.tts_language} or options {tts_options}"
+                    f"does not support language {self.pipeline.tts_language} or options {tts_options}:"
+                    f" {err}"
                 ),
-            )
-
-        self.tts_engine = engine
-        self.tts_options = tts_options
+            ) from err

     async def text_to_speech(self, tts_input: str) -> None:
         """Run text-to-speech portion of pipeline."""
+        assert self.tts_stream is not None
+
         self.process_event(
             PipelineEvent(
                 PipelineEventType.TTS_START,
                 {
-                    "engine": self.tts_engine,
+                    "engine": self.tts_stream.engine,
                     "language": self.pipeline.tts_language,
                     "voice": self.pipeline.tts_voice,
                     "tts_input": tts_input,
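Note: the pipeline no longer pre-validates TTS options or resolves media up front; it opens a `tts.ResultStream` once and hands its token/URL to clients in the `RUN_START` event. A minimal sketch of the stream-first flow as used in the hunk above (`hass`, `engine`, `language`, and `options` are assumed to come from an already-prepared pipeline):

```python
from homeassistant.components import tts

stream = tts.async_create_stream(
    hass=hass,
    engine=engine,
    language=language,
    options=options,
)
stream.async_set_message("The front door is unlocked")

# The pipeline forwards these identifiers in its events; a satellite fetches
# the audio over HTTP only when it is actually ready to play it.
token, url, mime = stream.token, stream.url, stream.content_type
```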
@@ -1259,14 +1281,9 @@ class PipelineRun:
             tts_media_id = tts_generate_media_source_id(
                 self.hass,
                 tts_input,
-                engine=self.tts_engine,
-                language=self.pipeline.tts_language,
-                options=self.tts_options,
+                engine=self.tts_stream.engine,
+                language=self.tts_stream.language,
+                options=self.tts_stream.options,
             )
-            tts_media = await media_source.async_resolve_media(
-                self.hass,
-                tts_media_id,
-                None,
-            )
         except Exception as src_error:
             _LOGGER.exception("Unexpected error during text-to-speech")
@@ -1275,10 +1292,13 @@ class PipelineRun:
                 message="Unexpected error during text-to-speech",
             ) from src_error

-        _LOGGER.debug("TTS result %s", tts_media)
+        self.tts_stream.async_set_message(tts_input)

         tts_output = {
             "media_id": tts_media_id,
-            **asdict(tts_media),
+            "token": self.tts_stream.token,
+            "url": self.tts_stream.url,
+            "mime_type": self.tts_stream.content_type,
         }

         self.process_event(
@@ -1458,8 +1478,8 @@ class PipelineInput:

     run: PipelineRun

-    conversation_id: str
-    """Identifier for the conversation."""
+    session: chat_session.ChatSession
+    """Session for the conversation."""

     stt_metadata: stt.SpeechMetadata | None = None
     """Metadata of stt input audio. Required when start_stage = stt."""
@@ -1484,7 +1504,9 @@ class PipelineInput:

     async def execute(self) -> None:
         """Run pipeline."""
-        self.run.start(conversation_id=self.conversation_id, device_id=self.device_id)
+        self.run.start(
+            conversation_id=self.session.conversation_id, device_id=self.device_id
+        )
         current_stage: PipelineStage | None = self.run.start_stage
         stt_audio_buffer: list[EnhancedAudioChunk] = []
         stt_processed_stream: AsyncIterable[EnhancedAudioChunk] | None = None
@@ -1568,7 +1590,7 @@ class PipelineInput:
             assert intent_input is not None
             tts_input = await self.run.recognize_intent(
                 intent_input,
-                self.conversation_id,
+                self.session.conversation_id,
                 self.device_id,
                 self.conversation_extra_system_prompt,
             )
@@ -1652,7 +1674,7 @@ class PipelineInput:
             <= PIPELINE_STAGE_ORDER.index(PipelineStage.INTENT)
             <= end_stage_index
         ):
-            prepare_tasks.append(self.run.prepare_recognize_intent())
+            prepare_tasks.append(self.run.prepare_recognize_intent(self.session))

         if (
             start_stage_index
@@ -1931,7 +1953,7 @@ class PipelineRunDebug:


 class PipelineStore(Store[SerializedPipelineStorageCollection]):
-    """Store entity registry data."""
+    """Store pipeline data."""

     async def _async_migrate_func(
         self,
@@ -2013,3 +2035,37 @@ async def async_run_migrations(hass: HomeAssistant) -> None:

     for pipeline, attr_updates in updates:
         await async_update_pipeline(hass, pipeline, **attr_updates)
+
+
+@dataclass
+class PipelineConversationData:
+    """Hold data for the duration of a conversation."""
+
+    continue_conversation_agent: str | None = None
+    """The agent that requested the conversation to be continued."""
+
+
+@callback
+def async_get_pipeline_conversation_data(
+    hass: HomeAssistant, session: chat_session.ChatSession
+) -> PipelineConversationData:
+    """Get the pipeline data for a specific conversation."""
+    all_conversation_data = hass.data.get(KEY_PIPELINE_CONVERSATION_DATA)
+    if all_conversation_data is None:
+        all_conversation_data = {}
+        hass.data[KEY_PIPELINE_CONVERSATION_DATA] = all_conversation_data
+
+    data = all_conversation_data.get(session.conversation_id)
+
+    if data is not None:
+        return data
+
+    @callback
+    def do_cleanup() -> None:
+        """Handle cleanup."""
+        all_conversation_data.pop(session.conversation_id)
+
+    session.async_on_cleanup(do_cleanup)
+
+    data = all_conversation_data[session.conversation_id] = PipelineConversationData()
+    return data
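Note: this helper gives each chat session exactly one `PipelineConversationData` object and evicts it through the session's cleanup hook. A minimal usage sketch (test-style; a running `hass` instance is assumed, and the agent id is hypothetical):

```python
from homeassistant.helpers import chat_session

with chat_session.async_get_chat_session(hass) as session:
    data1 = async_get_pipeline_conversation_data(hass, session)
    data2 = async_get_pipeline_conversation_data(hass, session)
    # Same session -> same object, so state survives across pipeline runs
    # until the session expires and do_cleanup() evicts the entry.
    assert data1 is data2
    data1.continue_conversation_agent = "conversation.my_agent"  # hypothetical id
```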
@@ -239,7 +239,7 @@ async def websocket_run(
     with chat_session.async_get_chat_session(
         hass, msg.get("conversation_id")
     ) as session:
-        input_args["conversation_id"] = session.conversation_id
+        input_args["session"] = session
         pipeline_input = PipelineInput(**input_args)

     try:
@@ -1,9 +1,11 @@
 """Base class for assist satellite entities."""

 import logging
+from pathlib import Path

 import voluptuous as vol

+from homeassistant.components.http import StaticPathConfig
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import config_validation as cv
@@ -15,6 +17,8 @@ from .const import (
     CONNECTION_TEST_DATA,
     DATA_COMPONENT,
     DOMAIN,
+    PREANNOUNCE_FILENAME,
+    PREANNOUNCE_URL,
     AssistSatelliteEntityFeature,
 )
 from .entity import (
@@ -56,6 +60,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
             {
                 vol.Optional("message"): str,
                 vol.Optional("media_id"): str,
+                vol.Optional("preannounce"): bool,
+                vol.Optional("preannounce_media_id"): str,
             }
         ),
         cv.has_at_least_one_key("message", "media_id"),
@@ -70,6 +76,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
             {
                 vol.Optional("start_message"): str,
                 vol.Optional("start_media_id"): str,
+                vol.Optional("preannounce"): bool,
+                vol.Optional("preannounce_media_id"): str,
                 vol.Optional("extra_system_prompt"): str,
             }
         ),
async_register_websocket_api(hass)
|
async_register_websocket_api(hass)
|
||||||
hass.http.register_view(ConnectionTestView())
|
hass.http.register_view(ConnectionTestView())
|
||||||
|
|
||||||
|
# Default preannounce sound
|
||||||
|
await hass.http.async_register_static_paths(
|
||||||
|
[
|
||||||
|
StaticPathConfig(
|
||||||
|
PREANNOUNCE_URL, str(Path(__file__).parent / PREANNOUNCE_FILENAME)
|
||||||
|
)
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
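Note: the bundled MP3 is served by Home Assistant's HTTP component; `StaticPathConfig` simply maps a URL path to a file on disk. A sketch of serving another packaged asset the same way (the URL and file name here are hypothetical, not part of this integration):

```python
from pathlib import Path

from homeassistant.components.http import StaticPathConfig

await hass.http.async_register_static_paths(
    [
        StaticPathConfig(
            "/api/my_component/static/chime.mp3",
            str(Path(__file__).parent / "chime.mp3"),
        )
    ]
)
# A satellite can now fetch http://<ha-host>:8123/api/my_component/static/chime.mp3
```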
@@ -20,6 +20,9 @@ CONNECTION_TEST_DATA: HassKey[dict[str, asyncio.Event]] = HassKey(
     f"{DOMAIN}_connection_tests"
 )

+PREANNOUNCE_FILENAME = "preannounce.mp3"
+PREANNOUNCE_URL = f"/api/assist_satellite/static/{PREANNOUNCE_FILENAME}"
+

 class AssistSatelliteEntityFeature(IntFlag):
     """Supported features of Assist satellite entity."""
@@ -23,15 +23,12 @@ from homeassistant.components.assist_pipeline import (
     vad,
 )
 from homeassistant.components.media_player import async_process_play_media_url
-from homeassistant.components.tts import (
-    generate_media_source_id as tts_generate_media_source_id,
-)
 from homeassistant.core import Context, callback
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import chat_session, entity
 from homeassistant.helpers.entity import EntityDescription

-from .const import AssistSatelliteEntityFeature
+from .const import PREANNOUNCE_URL, AssistSatelliteEntityFeature
 from .errors import AssistSatelliteError, SatelliteBusyError

 _LOGGER = logging.getLogger(__name__)
@@ -98,9 +95,15 @@ class AssistSatelliteAnnouncement:
     original_media_id: str
     """The raw media ID before processing."""

+    tts_token: str | None
+    """The TTS token of the media."""
+
     media_id_source: Literal["url", "media_id", "tts"]
     """Source of the media ID."""

+    preannounce_media_id: str | None = None
+    """Media ID to be played before announcement."""
+

 class AssistSatelliteEntity(entity.Entity):
     """Entity encapsulating the state and functionality of an Assist satellite."""
@@ -177,6 +180,8 @@ class AssistSatelliteEntity(entity.Entity):
         self,
         message: str | None = None,
         media_id: str | None = None,
+        preannounce: bool = True,
+        preannounce_media_id: str = PREANNOUNCE_URL,
     ) -> None:
         """Play and show an announcement on the satellite.

@@ -186,6 +191,9 @@ class AssistSatelliteEntity(entity.Entity):
         If media_id is provided, it is played directly. It is possible
         to omit the message and the satellite will not show any text.

+        If preannounce is True, a sound is played before the announcement.
+        If preannounce_media_id is provided, it overrides the default sound.
+
         Calls async_announce with message and media id.
         """
         await self._cancel_running_pipeline()
@@ -193,7 +201,11 @@ class AssistSatelliteEntity(entity.Entity):
         if message is None:
             message = ""

-        announcement = await self._resolve_announcement_media_id(message, media_id)
+        announcement = await self._resolve_announcement_media_id(
+            message,
+            media_id,
+            preannounce_media_id=preannounce_media_id if preannounce else None,
+        )

         if self._is_announcing:
             raise SatelliteBusyError
@@ -220,6 +232,8 @@ class AssistSatelliteEntity(entity.Entity):
         start_message: str | None = None,
         start_media_id: str | None = None,
         extra_system_prompt: str | None = None,
+        preannounce: bool = True,
+        preannounce_media_id: str = PREANNOUNCE_URL,
     ) -> None:
         """Start a conversation from the satellite.

@@ -229,6 +243,9 @@ class AssistSatelliteEntity(entity.Entity):
         If start_media_id is provided, it is played directly. It is possible
         to omit the message and the satellite will not show any text.

+        If preannounce is True, a sound is played before the start message or media.
+        If preannounce_media_id is provided, it overrides the default sound.
+
         Calls async_start_conversation.
         """
         await self._cancel_running_pipeline()
@@ -244,13 +261,17 @@ class AssistSatelliteEntity(entity.Entity):
             start_message = ""

         announcement = await self._resolve_announcement_media_id(
-            start_message, start_media_id
+            start_message,
+            start_media_id,
+            preannounce_media_id=preannounce_media_id if preannounce else None,
         )

         if self._is_announcing:
             raise SatelliteBusyError

         self._is_announcing = True
+        self._set_state(AssistSatelliteState.RESPONDING)

         # Provide our start info to the LLM so it understands context of incoming message
         if extra_system_prompt is not None:
             self._extra_system_prompt = extra_system_prompt
@@ -280,6 +301,7 @@ class AssistSatelliteEntity(entity.Entity):
             raise
         finally:
             self._is_announcing = False
+            self._set_state(AssistSatelliteState.IDLE)

     async def async_start_conversation(
         self, start_announcement: AssistSatelliteAnnouncement
@@ -470,20 +492,27 @@ class AssistSatelliteEntity(entity.Entity):
         return vad.VadSensitivity.to_seconds(vad_sensitivity)

     async def _resolve_announcement_media_id(
-        self, message: str, media_id: str | None
+        self,
+        message: str,
+        media_id: str | None,
+        preannounce_media_id: str | None = None,
     ) -> AssistSatelliteAnnouncement:
         """Resolve the media ID."""
         media_id_source: Literal["url", "media_id", "tts"] | None = None
+        tts_token: str | None = None

         if media_id:
             original_media_id = media_id

         else:
             media_id_source = "tts"
             # Synthesize audio and get URL
             pipeline_id = self._resolve_pipeline()
             pipeline = async_get_pipeline(self.hass, pipeline_id)

+            engine = tts.async_resolve_engine(self.hass, pipeline.tts_engine)
+            if engine is None:
+                raise HomeAssistantError(f"TTS engine {pipeline.tts_engine} not found")
+
             tts_options: dict[str, Any] = {}
             if pipeline.tts_voice is not None:
                 tts_options[tts.ATTR_VOICE] = pipeline.tts_voice
@@ -491,14 +520,23 @@ class AssistSatelliteEntity(entity.Entity):
             if self.tts_options is not None:
                 tts_options.update(self.tts_options)

-            media_id = tts_generate_media_source_id(
+            stream = tts.async_create_stream(
                 self.hass,
-                message,
-                engine=pipeline.tts_engine,
+                engine=engine,
+                language=pipeline.tts_language,
+                options=tts_options,
+            )
+            stream.async_set_message(message)
+
+            tts_token = stream.token
+            media_id = stream.url
+            original_media_id = tts.generate_media_source_id(
+                self.hass,
+                message,
+                engine=engine,
                 language=pipeline.tts_language,
                 options=tts_options,
             )
-            original_media_id = media_id

         if media_source.is_media_source_id(media_id):
             if not media_id_source:
@@ -516,6 +554,26 @@ class AssistSatelliteEntity(entity.Entity):
             # Resolve to full URL
             media_id = async_process_play_media_url(self.hass, media_id)

+        # Resolve preannounce media id
+        if preannounce_media_id:
+            if media_source.is_media_source_id(preannounce_media_id):
+                preannounce_media = await media_source.async_resolve_media(
+                    self.hass,
+                    preannounce_media_id,
+                    None,
+                )
+                preannounce_media_id = preannounce_media.url
+
+            # Resolve to full URL
+            preannounce_media_id = async_process_play_media_url(
+                self.hass, preannounce_media_id
+            )
+
         return AssistSatelliteAnnouncement(
-            message, media_id, original_media_id, media_id_source
+            message=message,
+            media_id=media_id,
+            original_media_id=original_media_id,
+            tts_token=tts_token,
+            media_id_source=media_id_source,
+            preannounce_media_id=preannounce_media_id,
         )

homeassistant/components/assist_satellite/preannounce.mp3 (new binary file, not shown)
@@ -8,12 +8,22 @@ announce:
     message:
       required: false
       example: "Time to wake up!"
+      default: ""
       selector:
         text:
     media_id:
       required: false
       selector:
         text:
+    preannounce:
+      required: false
+      default: true
+      selector:
+        boolean:
+    preannounce_media_id:
+      required: false
+      selector:
+        text:
 start_conversation:
   target:
     entity:
@@ -24,6 +34,7 @@ start_conversation:
     start_message:
       required: false
      example: "You left the lights on in the living room. Turn them off?"
+      default: ""
       selector:
         text:
     start_media_id:
@@ -34,3 +45,12 @@ start_conversation:
       required: false
       selector:
         text:
+    preannounce:
+      required: false
+      default: true
+      selector:
+        boolean:
+    preannounce_media_id:
+      required: false
+      selector:
+        text:
@@ -23,6 +23,14 @@
       "media_id": {
         "name": "Media ID",
         "description": "The media ID to announce instead of using text-to-speech."
+      },
+      "preannounce": {
+        "name": "Preannounce",
+        "description": "Play a sound before the announcement."
+      },
+      "preannounce_media_id": {
+        "name": "Preannounce media ID",
+        "description": "Custom media ID to play before the announcement."
       }
     }
   },
@@ -41,6 +49,14 @@
       "extra_system_prompt": {
         "name": "Extra system prompt",
         "description": "Provide background information to the AI about the request."
+      },
+      "preannounce": {
+        "name": "Preannounce",
+        "description": "Play a sound before the start message or media."
+      },
+      "preannounce_media_id": {
+        "name": "Preannounce media ID",
+        "description": "Custom media ID to play before the start message or media."
       }
     }
   }
@@ -198,7 +198,8 @@ async def websocket_test_connection(

     hass.async_create_background_task(
         satellite.async_internal_announce(
-            media_id=f"{CONNECTION_TEST_URL_BASE}/{connection_id}"
+            media_id=f"{CONNECTION_TEST_URL_BASE}/{connection_id}",
+            preannounce=False,
         ),
         f"assist_satellite_connection_test_{msg['entity_id']}",
     )
@@ -66,28 +66,28 @@
       "name": "Upload"
     },
     "load_avg_1m": {
-      "name": "Average load (1m)"
+      "name": "Average load (1 min)"
     },
     "load_avg_5m": {
-      "name": "Average load (5m)"
+      "name": "Average load (5 min)"
     },
     "load_avg_15m": {
-      "name": "Average load (15m)"
+      "name": "Average load (15 min)"
     },
     "24ghz_temperature": {
-      "name": "2.4GHz Temperature"
+      "name": "2.4GHz temperature"
     },
     "5ghz_temperature": {
-      "name": "5GHz Temperature"
+      "name": "5GHz temperature"
     },
     "cpu_temperature": {
-      "name": "CPU Temperature"
+      "name": "CPU temperature"
     },
     "5ghz_2_temperature": {
-      "name": "5GHz Temperature (Radio 2)"
+      "name": "5GHz temperature (Radio 2)"
     },
     "6ghz_temperature": {
-      "name": "6GHz Temperature"
+      "name": "6GHz temperature"
     },
     "cpu_usage": {
       "name": "CPU usage"
@@ -14,7 +14,7 @@
       "personal_access_token": "Personal Access Token (PAT)"
     },
     "description": "Set up an Azure DevOps instance to access your project. A Personal Access Token is only required for a private project.",
-    "title": "Add Azure DevOps Project"
+    "title": "Add Azure DevOps project"
   },
   "reauth_confirm": {
     "data": {
@@ -32,7 +32,7 @@
   "entity": {
     "sensor": {
       "build_id": {
-        "name": "{definition_name} latest build id"
+        "name": "{definition_name} latest build ID"
       },
       "finish_time": {
         "name": "{definition_name} latest build finish time"
@@ -59,7 +59,7 @@
         "name": "{definition_name} latest build start time"
       },
       "url": {
-        "name": "{definition_name} latest build url"
+        "name": "{definition_name} latest build URL"
       },
       "work_item_count": {
         "name": "{item_type} {item_state} work items"
@@ -68,7 +68,7 @@
   },
   "exceptions": {
     "authentication_failed": {
-      "message": "Could not authorize with Azure DevOps for {title}. You will need to update your personal access token."
+      "message": "Could not authorize with Azure DevOps for {title}. You will need to update your Personal Access Token."
     }
   }
 }
@@ -13,7 +13,11 @@ from azure.storage.blob.aio import ContainerClient

 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
+from homeassistant.exceptions import (
+    ConfigEntryAuthFailed,
+    ConfigEntryError,
+    ConfigEntryNotReady,
+)
 from homeassistant.helpers.aiohttp_client import async_create_clientsession

 from .const import (
@@ -52,7 +56,7 @@ async def async_setup_entry(
             translation_placeholders={CONF_ACCOUNT_NAME: entry.data[CONF_ACCOUNT_NAME]},
         ) from err
     except ClientAuthenticationError as err:
-        raise ConfigEntryError(
+        raise ConfigEntryAuthFailed(
             translation_domain=DOMAIN,
             translation_key="invalid_auth",
             translation_placeholders={CONF_ACCOUNT_NAME: entry.data[CONF_ACCOUNT_NAME]},
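Note: this switch relies on a core contract: raising `ConfigEntryAuthFailed` (rather than `ConfigEntryError`) from `async_setup_entry` marks the entry as needing reauthentication and starts the config flow's `async_step_reauth`, which the flow below implements. A minimal sketch of that contract, with a hypothetical client and exception:

```python
from homeassistant.exceptions import ConfigEntryAuthFailed

async def async_setup_entry(hass, entry):  # simplified sketch
    try:
        await client.login(entry.data["account_key"])  # hypothetical client
    except AuthError as err:  # hypothetical client exception
        # Home Assistant now shows a repair and launches async_step_reauth.
        raise ConfigEntryAuthFailed("Account key rejected") from err
    return True
```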
@@ -141,7 +141,7 @@ class AzureStorageBackupAgent(BackupAgent):
         """Delete a backup file."""
         blob = await self._find_blob_by_backup_id(backup_id)
         if blob is None:
-            return
+            raise BackupNotFound(f"Backup {backup_id} not found")
         await self._client.delete_blob(blob.name)

     @handle_backup_errors
@@ -163,11 +163,11 @@ class AzureStorageBackupAgent(BackupAgent):
         self,
         backup_id: str,
         **kwargs: Any,
-    ) -> AgentBackup | None:
+    ) -> AgentBackup:
         """Return a backup."""
         blob = await self._find_blob_by_backup_id(backup_id)
         if blob is None:
-            return None
+            raise BackupNotFound(f"Backup {backup_id} not found")

         return AgentBackup.from_dict(json.loads(blob.metadata["backup_metadata"]))

@@ -175,7 +175,8 @@ class AzureStorageBackupAgent(BackupAgent):
         """Find a blob by backup id."""
         async for blob in self._client.list_blobs(include="metadata"):
             if (
-                backup_id == blob.metadata.get("backup_id", "")
+                blob.metadata is not None
+                and backup_id == blob.metadata.get("backup_id", "")
                 and blob.metadata.get("metadata_version") == METADATA_VERSION
             ):
                 return blob
@@ -1,5 +1,6 @@
 """Config flow for Azure Storage integration."""

+from collections.abc import Mapping
 import logging
 from typing import Any

@@ -26,6 +27,26 @@ _LOGGER = logging.getLogger(__name__)
 class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN):
     """Handle a config flow for azure storage."""

+    def get_account_url(self, account_name: str) -> str:
+        """Get the account URL."""
+        return f"https://{account_name}.blob.core.windows.net/"
+
+    async def validate_config(
+        self, container_client: ContainerClient
+    ) -> dict[str, str]:
+        """Validate the configuration."""
+        errors: dict[str, str] = {}
+        try:
+            await container_client.exists()
+        except ResourceNotFoundError:
+            errors["base"] = "cannot_connect"
+        except ClientAuthenticationError:
+            errors[CONF_STORAGE_ACCOUNT_KEY] = "invalid_auth"
+        except Exception:
+            _LOGGER.exception("Unknown exception occurred")
+            errors["base"] = "unknown"
+        return errors
+
     async def async_step_user(
         self, user_input: dict[str, Any] | None = None
     ) -> ConfigFlowResult:
@@ -38,20 +59,13 @@ class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN):
                 {CONF_ACCOUNT_NAME: user_input[CONF_ACCOUNT_NAME]}
             )
             container_client = ContainerClient(
-                account_url=f"https://{user_input[CONF_ACCOUNT_NAME]}.blob.core.windows.net/",
+                account_url=self.get_account_url(user_input[CONF_ACCOUNT_NAME]),
                 container_name=user_input[CONF_CONTAINER_NAME],
                 credential=user_input[CONF_STORAGE_ACCOUNT_KEY],
                 transport=AioHttpTransport(session=async_get_clientsession(self.hass)),
             )
-            try:
-                await container_client.exists()
-            except ResourceNotFoundError:
-                errors["base"] = "cannot_connect"
-            except ClientAuthenticationError:
-                errors[CONF_STORAGE_ACCOUNT_KEY] = "invalid_auth"
-            except Exception:
-                _LOGGER.exception("Unknown exception occurred")
-                errors["base"] = "unknown"
+            errors = await self.validate_config(container_client)
             if not errors:
                 return self.async_create_entry(
                     title=f"{user_input[CONF_ACCOUNT_NAME]}/{user_input[CONF_CONTAINER_NAME]}",
@@ -70,3 +84,77 @@ class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN):
             ),
             errors=errors,
         )
+
+    async def async_step_reauth(
+        self, entry_data: Mapping[str, Any]
+    ) -> ConfigFlowResult:
+        """Perform reauth upon an API authentication error."""
+        return await self.async_step_reauth_confirm()
+
+    async def async_step_reauth_confirm(
+        self, user_input: dict[str, Any] | None = None
+    ) -> ConfigFlowResult:
+        """Confirm reauth dialog."""
+        errors: dict[str, str] = {}
+        reauth_entry = self._get_reauth_entry()
+
+        if user_input is not None:
+            container_client = ContainerClient(
+                account_url=self.get_account_url(reauth_entry.data[CONF_ACCOUNT_NAME]),
+                container_name=reauth_entry.data[CONF_CONTAINER_NAME],
+                credential=user_input[CONF_STORAGE_ACCOUNT_KEY],
+                transport=AioHttpTransport(session=async_get_clientsession(self.hass)),
+            )
+            errors = await self.validate_config(container_client)
+            if not errors:
+                return self.async_update_reload_and_abort(
+                    reauth_entry,
+                    data={**reauth_entry.data, **user_input},
+                )
+        return self.async_show_form(
+            step_id="reauth_confirm",
+            data_schema=vol.Schema(
+                {
+                    vol.Required(CONF_STORAGE_ACCOUNT_KEY): str,
+                }
+            ),
+            errors=errors,
+        )
+
+    async def async_step_reconfigure(
+        self, user_input: dict[str, Any] | None = None
+    ) -> ConfigFlowResult:
+        """Reconfigure the entry."""
+        errors: dict[str, str] = {}
+        reconfigure_entry = self._get_reconfigure_entry()
+
+        if user_input is not None:
+            container_client = ContainerClient(
+                account_url=self.get_account_url(
+                    reconfigure_entry.data[CONF_ACCOUNT_NAME]
+                ),
+                container_name=user_input[CONF_CONTAINER_NAME],
+                credential=user_input[CONF_STORAGE_ACCOUNT_KEY],
+                transport=AioHttpTransport(session=async_get_clientsession(self.hass)),
+            )
+            errors = await self.validate_config(container_client)
+            if not errors:
+                return self.async_update_reload_and_abort(
+                    reconfigure_entry,
+                    data={**reconfigure_entry.data, **user_input},
+                )
+        return self.async_show_form(
+            data_schema=vol.Schema(
+                {
+                    vol.Required(
+                        CONF_CONTAINER_NAME,
+                        default=reconfigure_entry.data[CONF_CONTAINER_NAME],
+                    ): str,
+                    vol.Required(
+                        CONF_STORAGE_ACCOUNT_KEY,
+                        default=reconfigure_entry.data[CONF_STORAGE_ACCOUNT_KEY],
+                    ): str,
+                }
+            ),
+            errors=errors,
+        )
@@ -7,6 +7,6 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["azure-storage-blob"],
-  "quality_scale": "bronze",
+  "quality_scale": "platinum",
   "requirements": ["azure-storage-blob==12.24.0"]
 }
@@ -57,7 +57,7 @@ rules:
     status: exempt
     comment: |
       This integration does not have platforms.
-  reauthentication-flow: todo
+  reauthentication-flow: done
   test-coverage: done

   # Gold
@@ -121,7 +121,7 @@ rules:
     status: exempt
     comment: |
       This integration does not have entities.
-  reconfiguration-flow: todo
+  reconfiguration-flow: done
   repair-issues: done
   stale-devices:
     status: exempt
@@ -19,10 +19,34 @@
       },
       "description": "Set up an Azure (Blob) storage account to be used for backups.",
       "title": "Add Azure storage account"
+    },
+    "reauth_confirm": {
+      "data": {
+        "storage_account_key": "[%key:component::azure_storage::config::step::user::data::storage_account_key%]"
+      },
+      "data_description": {
+        "storage_account_key": "[%key:component::azure_storage::config::step::user::data_description::storage_account_key%]"
+      },
+      "description": "Provide a new storage account key.",
+      "title": "Reauthenticate Azure storage account"
+    },
+    "reconfigure": {
+      "data": {
+        "container_name": "[%key:component::azure_storage::config::step::user::data::container_name%]",
+        "storage_account_key": "[%key:component::azure_storage::config::step::user::data::storage_account_key%]"
+      },
+      "data_description": {
+        "container_name": "[%key:component::azure_storage::config::step::user::data_description::container_name%]",
+        "storage_account_key": "[%key:component::azure_storage::config::step::user::data_description::storage_account_key%]"
+      },
+      "description": "Change the settings of the Azure storage integration.",
+      "title": "Reconfigure Azure storage account"
     }
   },
   "abort": {
-    "already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
+    "already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
+    "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
+    "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
   }
 },
 "issues": {
@@ -1,7 +1,9 @@
 """The Backup integration."""

+from homeassistant.config_entries import SOURCE_SYSTEM
+from homeassistant.const import Platform
 from homeassistant.core import HomeAssistant, ServiceCall
-from homeassistant.helpers import config_validation as cv
+from homeassistant.helpers import config_validation as cv, discovery_flow
 from homeassistant.helpers.backup import DATA_BACKUP
 from homeassistant.helpers.hassio import is_hassio
 from homeassistant.helpers.typing import ConfigType
@@ -18,10 +20,12 @@ from .agent import (
 )
 from .config import BackupConfig, CreateBackupParametersDict
 from .const import DATA_MANAGER, DOMAIN
+from .coordinator import BackupConfigEntry, BackupDataUpdateCoordinator
 from .http import async_register_http_views
 from .manager import (
     BackupManager,
     BackupManagerError,
+    BackupPlatformEvent,
     BackupPlatformProtocol,
     BackupReaderWriter,
     BackupReaderWriterError,
@@ -52,6 +56,7 @@ __all__ = [
     "BackupConfig",
     "BackupManagerError",
     "BackupNotFound",
+    "BackupPlatformEvent",
     "BackupPlatformProtocol",
     "BackupReaderWriter",
     "BackupReaderWriterError",
@@ -74,6 +79,8 @@ __all__ = [
     "suggested_filename_from_name_date",
 ]

+PLATFORMS = [Platform.SENSOR]
+
 CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)

@@ -128,4 +135,28 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

     async_register_http_views(hass)

+    discovery_flow.async_create_flow(
+        hass, DOMAIN, context={"source": SOURCE_SYSTEM}, data={}
+    )
+
     return True
+
+
+async def async_setup_entry(hass: HomeAssistant, entry: BackupConfigEntry) -> bool:
+    """Set up a config entry."""
+    backup_manager: BackupManager = hass.data[DATA_MANAGER]
+    coordinator = BackupDataUpdateCoordinator(hass, entry, backup_manager)
+    await coordinator.async_config_entry_first_refresh()
+
+    entry.async_on_unload(coordinator.async_unsubscribe)
+
+    entry.runtime_data = coordinator
+
+    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
+
+    return True
+
+
+async def async_unload_entry(hass: HomeAssistant, entry: BackupConfigEntry) -> bool:
+    """Unload a config entry."""
+    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
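Note: the discovery flow fired with `SOURCE_SYSTEM` is what creates the integration's single config entry at startup (answered by `async_step_system` in the new config_flow.py further down), and `entry.runtime_data` then carries the coordinator to the sensor platform. A sketch of the consumer side, with a hypothetical platform setup:

```python
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback

async def async_setup_entry(
    hass: HomeAssistant,
    entry: BackupConfigEntry,  # ConfigEntry[BackupDataUpdateCoordinator]
    async_add_entities: AddEntitiesCallback,
) -> None:
    coordinator = entry.runtime_data
    # coordinator.data is a BackupCoordinatorData snapshot, e.g.:
    state = coordinator.data.backup_manager_state
```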
@@ -41,6 +41,8 @@ class BackupAgent(abc.ABC):
     ) -> AsyncIterator[bytes]:
         """Download a backup file.

+        Raises BackupNotFound if the backup does not exist.
+
         :param backup_id: The ID of the backup that was returned in async_list_backups.
         :return: An async iterator that yields bytes.
         """
@@ -67,6 +69,8 @@ class BackupAgent(abc.ABC):
     ) -> None:
         """Delete a backup file.

+        Raises BackupNotFound if the backup does not exist.
+
         :param backup_id: The ID of the backup that was returned in async_list_backups.
         """

@@ -79,8 +83,11 @@ class BackupAgent(abc.ABC):
         self,
         backup_id: str,
         **kwargs: Any,
-    ) -> AgentBackup | None:
-        """Return a backup."""
+    ) -> AgentBackup:
+        """Return a backup.
+
+        Raises BackupNotFound if the backup does not exist.
+        """


 class LocalBackupAgent(BackupAgent):
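Note: the agent contract changes here from returning `None` to raising, so every caller switches from a None-check to exception handling. A minimal sketch of the caller side under the new contract (the `agent` object is assumed):

```python
from homeassistant.components.backup import BackupNotFound

# before: backup = await agent.async_get_backup(backup_id)
#         if backup is None: ...handle missing backup...
try:
    backup = await agent.async_get_backup(backup_id)
except BackupNotFound:
    ...  # handle the missing backup
```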
@@ -88,13 +88,13 @@ class CoreLocalBackupAgent(LocalBackupAgent):
         self,
         backup_id: str,
         **kwargs: Any,
-    ) -> AgentBackup | None:
+    ) -> AgentBackup:
         """Return a backup."""
         if not self._loaded_backups:
             await self._load_backups()

         if backup_id not in self._backups:
-            return None
+            raise BackupNotFound(f"Backup {backup_id} not found")

         backup, backup_path = self._backups[backup_id]
         if not await self._hass.async_add_executor_job(backup_path.exists):
@@ -107,7 +107,7 @@ class CoreLocalBackupAgent(LocalBackupAgent):
                 backup_path,
             )
             self._backups.pop(backup_id)
-            return None
+            raise BackupNotFound(f"Backup {backup_id} not found")

         return backup

@@ -130,10 +130,7 @@ class CoreLocalBackupAgent(LocalBackupAgent):
         if not self._loaded_backups:
             await self._load_backups()

-        try:
-            backup_path = self.get_backup_path(backup_id)
-        except BackupNotFound:
-            return
+        backup_path = self.get_backup_path(backup_id)
         await self._hass.async_add_executor_job(backup_path.unlink, True)
         LOGGER.debug("Deleted backup located at %s", backup_path)
         self._backups.pop(backup_id)

homeassistant/components/backup/config_flow.py (new file, 21 lines)
@@ -0,0 +1,21 @@
+"""Config flow for Home Assistant Backup integration."""
+
+from __future__ import annotations
+
+from typing import Any
+
+from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
+
+from .const import DOMAIN
+
+
+class BackupConfigFlow(ConfigFlow, domain=DOMAIN):
+    """Handle a config flow for Home Assistant Backup."""
+
+    VERSION = 1
+
+    async def async_step_system(
+        self, user_input: dict[str, Any] | None = None
+    ) -> ConfigFlowResult:
+        """Handle the initial step."""
+        return self.async_create_entry(title="Backup", data={})
@@ -16,8 +16,8 @@ DATA_MANAGER: HassKey[BackupManager] = HassKey(DOMAIN)
 LOGGER = getLogger(__package__)

 EXCLUDE_FROM_BACKUP = [
-    "__pycache__/*",
-    ".DS_Store",
+    "**/__pycache__/*",
+    "**/.DS_Store",
     ".HA_RESTORE",
     "*.db-shm",
     "*.log.*",
homeassistant/components/backup/coordinator.py (new file, 81 lines)
@@ -0,0 +1,81 @@
+"""Coordinator for Home Assistant Backup integration."""
+
+from __future__ import annotations
+
+from collections.abc import Callable
+from dataclasses import dataclass
+from datetime import datetime
+
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.core import HomeAssistant, callback
+from homeassistant.helpers.backup import (
+    async_subscribe_events,
+    async_subscribe_platform_events,
+)
+from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
+
+from .const import DOMAIN, LOGGER
+from .manager import (
+    BackupManager,
+    BackupManagerState,
+    BackupPlatformEvent,
+    ManagerStateEvent,
+)
+
+type BackupConfigEntry = ConfigEntry[BackupDataUpdateCoordinator]
+
+
+@dataclass
+class BackupCoordinatorData:
+    """Class to hold backup data."""
+
+    backup_manager_state: BackupManagerState
+    last_successful_automatic_backup: datetime | None
+    next_scheduled_automatic_backup: datetime | None
+
+
+class BackupDataUpdateCoordinator(DataUpdateCoordinator[BackupCoordinatorData]):
+    """Class to retrieve backup status."""
+
+    config_entry: ConfigEntry
+
+    def __init__(
+        self,
+        hass: HomeAssistant,
+        config_entry: ConfigEntry,
+        backup_manager: BackupManager,
+    ) -> None:
+        """Initialize coordinator."""
+        super().__init__(
+            hass,
+            LOGGER,
+            config_entry=config_entry,
+            name=DOMAIN,
+            update_interval=None,
+        )
+        self.unsubscribe: list[Callable[[], None]] = [
+            async_subscribe_events(hass, self._on_event),
+            async_subscribe_platform_events(hass, self._on_event),
+        ]
+
+        self.backup_manager = backup_manager
+
+    @callback
+    def _on_event(self, event: ManagerStateEvent | BackupPlatformEvent) -> None:
+        """Handle new event."""
+        LOGGER.debug("Received backup event: %s", event)
+        self.config_entry.async_create_task(self.hass, self.async_refresh())
+
+    async def _async_update_data(self) -> BackupCoordinatorData:
+        """Update backup manager data."""
+        return BackupCoordinatorData(
+            self.backup_manager.state,
+            self.backup_manager.config.data.last_completed_automatic_backup,
+            self.backup_manager.config.data.schedule.next_automatic_backup,
+        )
+
+    @callback
+    def async_unsubscribe(self) -> None:
+        """Unsubscribe from events."""
+        for unsub in self.unsubscribe:
+            unsub()
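Since update_interval=None disables polling, the coordinator refreshes only when a backup or platform event arrives. A self-contained sketch of the same push-driven pattern without Home Assistant imports (all names here are illustrative):

from collections.abc import Callable


class PushCoordinator:
    """Sketch of push-driven refresh: no polling timer; an incoming
    event triggers the single recompute path."""

    def __init__(self) -> None:
        self._subscribers: list[Callable[[object], None]] = []
        self.data: object | None = None

    def subscribe(self, callback: Callable[[object], None]) -> Callable[[], None]:
        # Returns an unsubscribe callable, like async_subscribe_events above.
        self._subscribers.append(callback)
        return lambda: self._subscribers.remove(callback)

    def on_event(self, event: object) -> None:
        # Equivalent of _on_event scheduling async_refresh: recompute once
        # per event rather than on an interval.
        self.data = event
        for callback in self._subscribers:
            callback(event)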
homeassistant/components/backup/diagnostics.py (new file, 27 lines)
@@ -0,0 +1,27 @@
+"""Diagnostics support for Home Assistant Backup integration."""
+
+from __future__ import annotations
+
+from typing import Any
+
+from homeassistant.components.diagnostics import async_redact_data
+from homeassistant.const import CONF_PASSWORD
+from homeassistant.core import HomeAssistant
+
+from .coordinator import BackupConfigEntry
+
+
+async def async_get_config_entry_diagnostics(
+    hass: HomeAssistant, entry: BackupConfigEntry
+) -> dict[str, Any]:
+    """Return diagnostics for a config entry."""
+    coordinator = entry.runtime_data
+    return {
+        "backup_agents": [
+            {"name": agent.name, "agent_id": agent.agent_id}
+            for agent in coordinator.backup_manager.backup_agents.values()
+        ],
+        "backup_config": async_redact_data(
+            coordinator.backup_manager.config.data.to_dict(), [CONF_PASSWORD]
+        ),
+    }
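async_redact_data (a synchronous callback despite its name) recursively replaces the value of every listed key, so the backup encryption password never lands in a diagnostics dump. A hedged illustration, assuming a Home Assistant dev environment; the sample dict is made up:

from homeassistant.components.diagnostics import async_redact_data

# Made-up sample config; "password" is the key named by CONF_PASSWORD.
sample = {"create_backup": {"password": "hunter2", "agent_ids": ["backup.local"]}}
print(async_redact_data(sample, ["password"]))
# {'create_backup': {'password': '**REDACTED**', 'agent_ids': ['backup.local']}}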
homeassistant/components/backup/entity.py (new file, 36 lines)
@@ -0,0 +1,36 @@
+"""Base for backup entities."""
+
+from __future__ import annotations
+
+from homeassistant.const import __version__ as HA_VERSION
+from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
+from homeassistant.helpers.entity import EntityDescription
+from homeassistant.helpers.update_coordinator import CoordinatorEntity
+
+from .const import DOMAIN
+from .coordinator import BackupDataUpdateCoordinator
+
+
+class BackupManagerEntity(CoordinatorEntity[BackupDataUpdateCoordinator]):
+    """Base entity for backup manager."""
+
+    _attr_has_entity_name = True
+
+    def __init__(
+        self,
+        coordinator: BackupDataUpdateCoordinator,
+        entity_description: EntityDescription,
+    ) -> None:
+        """Initialize base entity."""
+        super().__init__(coordinator)
+        self.entity_description = entity_description
+        self._attr_unique_id = entity_description.key
+        self._attr_device_info = DeviceInfo(
+            identifiers={(DOMAIN, "backup_manager")},
+            manufacturer="Home Assistant",
+            model="Home Assistant Backup",
+            sw_version=HA_VERSION,
+            name="Backup",
+            entry_type=DeviceEntryType.SERVICE,
+            configuration_url="homeassistant://config/backup",
+        )
homeassistant/components/backup/http.py
@@ -15,6 +15,7 @@ from multidict import istr
 from homeassistant.components.http import KEY_HASS, HomeAssistantView, require_admin
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import HomeAssistantError
+from homeassistant.helpers import frame
 from homeassistant.util import slugify

 from . import util
@@ -59,11 +60,19 @@ class DownloadBackupView(HomeAssistantView):
         if agent_id not in manager.backup_agents:
             return Response(status=HTTPStatus.BAD_REQUEST)
         agent = manager.backup_agents[agent_id]
-        backup = await agent.async_get_backup(backup_id)
-        # We don't need to check if the path exists, aiohttp.FileResponse will handle
-        # that
-        if backup is None:
+        try:
+            backup = await agent.async_get_backup(backup_id)
+        except BackupNotFound:
+            return Response(status=HTTPStatus.NOT_FOUND)
+
+        # Check for None to be backwards compatible with the old BackupAgent API,
+        # this can be removed in HA Core 2025.10
+        if not backup:
+            frame.report_usage(
+                "returns None from BackupAgent.async_get_backup",
+                breaks_in_ha_version="2025.10",
+                integration_domain=agent_id.partition(".")[0],
+            )
             return Response(status=HTTPStatus.NOT_FOUND)

         headers = {
@@ -92,6 +101,8 @@ class DownloadBackupView(HomeAssistantView):
     ) -> StreamResponse | FileResponse | Response:
         if agent_id in manager.local_backup_agents:
             local_agent = manager.local_backup_agents[agent_id]
+            # We don't need to check if the path exists, aiohttp.FileResponse will
+            # handle that
             path = local_agent.get_backup_path(backup_id)
             return FileResponse(path=path.as_posix(), headers=headers)

homeassistant/components/backup/manager.py
@@ -30,6 +30,7 @@ from homeassistant.backup_restore import (
 from homeassistant.const import __version__ as HAVERSION
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers import (
+    frame,
     instance_id,
     integration_platform,
     issue_registry as ir,
@@ -64,6 +65,7 @@ from .models import (
     AgentBackup,
     BackupError,
     BackupManagerError,
+    BackupNotFound,
     BackupReaderWriterError,
     BaseBackup,
     Folder,
@@ -227,6 +229,13 @@ class RestoreBackupEvent(ManagerStateEvent):
     state: RestoreBackupState


+@dataclass(frozen=True, kw_only=True, slots=True)
+class BackupPlatformEvent:
+    """Backup platform class."""
+
+    domain: str
+
+
 @dataclass(frozen=True, kw_only=True, slots=True)
 class BlockedEvent(ManagerStateEvent):
     """Backup manager blocked, Home Assistant is starting."""
@@ -349,10 +358,13 @@ class BackupManager:

         # Latest backup event and backup event subscribers
         self.last_event: ManagerStateEvent = BlockedEvent()
-        self.last_non_idle_event: ManagerStateEvent | None = None
+        self.last_action_event: ManagerStateEvent | None = None
         self._backup_event_subscriptions = hass.data[
             DATA_BACKUP
         ].backup_event_subscriptions
+        self._backup_platform_event_subscriptions = hass.data[
+            DATA_BACKUP
+        ].backup_platform_event_subscriptions

     async def async_setup(self) -> None:
         """Set up the backup manager."""
@@ -463,6 +475,9 @@ class BackupManager:
         LOGGER.debug("%s platforms loaded in total", len(self.platforms))
         LOGGER.debug("%s agents loaded in total", len(self.backup_agents))
         LOGGER.debug("%s local agents loaded in total", len(self.local_backup_agents))
+        event = BackupPlatformEvent(domain=integration_domain)
+        for subscription in self._backup_platform_event_subscriptions:
+            subscription(event)

     async def async_pre_backup_actions(self) -> None:
         """Perform pre backup actions."""
@@ -665,6 +680,8 @@ class BackupManager:
         )
         for idx, result in enumerate(get_backup_results):
             agent_id = agent_ids[idx]
+            if isinstance(result, BackupNotFound):
+                continue
             if isinstance(result, BackupAgentError):
                 agent_errors[agent_id] = result
                 continue
@@ -676,7 +693,14 @@ class BackupManager:
                 continue
             if isinstance(result, BaseException):
                 raise result  # unexpected error
+            # Check for None to be backwards compatible with the old BackupAgent API,
+            # this can be removed in HA Core 2025.10
             if not result:
+                frame.report_usage(
+                    "returns None from BackupAgent.async_get_backup",
+                    breaks_in_ha_version="2025.10",
+                    integration_domain=agent_id.partition(".")[0],
+                )
                 continue
             if backup is None:
                 if known_backup := self.known_backups.get(backup_id):
@@ -740,6 +764,8 @@ class BackupManager:
         )
         for idx, result in enumerate(delete_backup_results):
             agent_id = agent_ids[idx]
+            if isinstance(result, BackupNotFound):
+                continue
             if isinstance(result, BackupAgentError):
                 agent_errors[agent_id] = result
                 continue
@@ -849,7 +875,7 @@ class BackupManager:
         agent_errors = {
             backup_id: error
             for backup_id, error in zip(backup_ids, delete_results, strict=True)
-            if error
+            if error and not isinstance(error, BackupNotFound)
         }
         if agent_errors:
             LOGGER.error(
@@ -1281,7 +1307,20 @@ class BackupManager:
     ) -> None:
         """Initiate restoring a backup."""
         agent = self.backup_agents[agent_id]
-        if not await agent.async_get_backup(backup_id):
+        try:
+            backup = await agent.async_get_backup(backup_id)
+        except BackupNotFound as err:
+            raise BackupManagerError(
+                f"Backup {backup_id} not found in agent {agent_id}"
+            ) from err
+        # Check for None to be backwards compatible with the old BackupAgent API,
+        # this can be removed in HA Core 2025.10
+        if not backup:
+            frame.report_usage(
+                "returns None from BackupAgent.async_get_backup",
+                breaks_in_ha_version="2025.10",
+                integration_domain=agent_id.partition(".")[0],
+            )
             raise BackupManagerError(
                 f"Backup {backup_id} not found in agent {agent_id}"
             )
@@ -1311,7 +1350,7 @@ class BackupManager:
         LOGGER.debug("Backup state: %s -> %s", current_state, new_state)
         self.last_event = event
         if not isinstance(event, (BlockedEvent, IdleEvent)):
-            self.last_non_idle_event = event
+            self.last_action_event = event
         for subscription in self._backup_event_subscriptions:
             subscription(event)

@@ -1369,7 +1408,20 @@ class BackupManager:
             agent = self.backup_agents[agent_id]
         except KeyError as err:
             raise BackupManagerError(f"Invalid agent selected: {agent_id}") from err
-        if not await agent.async_get_backup(backup_id):
+        try:
+            backup = await agent.async_get_backup(backup_id)
+        except BackupNotFound as err:
+            raise BackupManagerError(
+                f"Backup {backup_id} not found in agent {agent_id}"
+            ) from err
+        # Check for None to be backwards compatible with the old BackupAgent API,
+        # this can be removed in HA Core 2025.10
+        if not backup:
+            frame.report_usage(
+                "returns None from BackupAgent.async_get_backup",
+                breaks_in_ha_version="2025.10",
+                integration_domain=agent_id.partition(".")[0],
+            )
             raise BackupManagerError(
                 f"Backup {backup_id} not found in agent {agent_id}"
             )
@@ -1674,7 +1726,9 @@ class CoreBackupReaderWriter(BackupReaderWriter):
         """Filter to filter excludes."""

         for exclude in excludes:
-            if not path.match(exclude):
+            # The home assistant core configuration directory is added as "data"
+            # in the tar file, so we need to prefix that path to the filters.
+            if not path.full_match(f"data/{exclude}"):
                 continue
             LOGGER.debug("Ignoring %s because of %s", path, exclude)
             return True
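Taken together with the **/ globs added to EXCLUDE_FROM_BACKUP above, the switch from Path.match to Path.full_match (plus the data/ prefix) anchors patterns at the archive root while still matching at any depth. A small demonstration; PurePath.full_match requires Python 3.13, which this release targets, and the sample path is made up:

from pathlib import PurePath

# Made-up archive member path; the config directory is stored under "data/".
member = PurePath("data/custom_components/demo/__pycache__/demo.cpython-313.pyc")

print(member.full_match("data/__pycache__/*"))     # False: only top-level caches
print(member.full_match("data/**/__pycache__/*"))  # True: caches at any depth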
homeassistant/components/backup/manifest.json
@@ -5,8 +5,9 @@
   "codeowners": ["@home-assistant/core"],
   "dependencies": ["http", "websocket_api"],
   "documentation": "https://www.home-assistant.io/integrations/backup",
-  "integration_type": "system",
+  "integration_type": "service",
   "iot_class": "calculated",
   "quality_scale": "internal",
-  "requirements": ["cronsim==2.6", "securetar==2025.2.1"]
+  "requirements": ["cronsim==2.6", "securetar==2025.2.1"],
+  "single_config_entry": true
 }
homeassistant/components/backup/sensor.py (new file, 75 lines)
@@ -0,0 +1,75 @@
+"""Sensor platform for Home Assistant Backup integration."""
+
+from __future__ import annotations
+
+from collections.abc import Callable
+from dataclasses import dataclass
+from datetime import datetime
+
+from homeassistant.components.sensor import (
+    SensorDeviceClass,
+    SensorEntity,
+    SensorEntityDescription,
+)
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
+
+from .coordinator import BackupConfigEntry, BackupCoordinatorData
+from .entity import BackupManagerEntity
+from .manager import BackupManagerState
+
+
+@dataclass(kw_only=True, frozen=True)
+class BackupSensorEntityDescription(SensorEntityDescription):
+    """Description for Home Assistant Backup sensor entities."""
+
+    value_fn: Callable[[BackupCoordinatorData], str | datetime | None]
+
+
+BACKUP_MANAGER_DESCRIPTIONS = (
+    BackupSensorEntityDescription(
+        key="backup_manager_state",
+        translation_key="backup_manager_state",
+        device_class=SensorDeviceClass.ENUM,
+        options=[state.value for state in BackupManagerState],
+        value_fn=lambda data: data.backup_manager_state,
+    ),
+    BackupSensorEntityDescription(
+        key="next_scheduled_automatic_backup",
+        translation_key="next_scheduled_automatic_backup",
+        device_class=SensorDeviceClass.TIMESTAMP,
+        value_fn=lambda data: data.next_scheduled_automatic_backup,
+    ),
+    BackupSensorEntityDescription(
+        key="last_successful_automatic_backup",
+        translation_key="last_successful_automatic_backup",
+        device_class=SensorDeviceClass.TIMESTAMP,
+        value_fn=lambda data: data.last_successful_automatic_backup,
+    ),
+)
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    config_entry: BackupConfigEntry,
+    async_add_entities: AddConfigEntryEntitiesCallback,
+) -> None:
+    """Sensor set up for backup config entry."""
+
+    coordinator = config_entry.runtime_data
+
+    async_add_entities(
+        BackupManagerSensor(coordinator, description)
+        for description in BACKUP_MANAGER_DESCRIPTIONS
+    )
+
+
+class BackupManagerSensor(BackupManagerEntity, SensorEntity):
+    """Sensor to track backup manager state."""
+
+    entity_description: BackupSensorEntityDescription
+
+    @property
+    def native_value(self) -> str | datetime | None:
+        """Return native value of entity."""
+        return self.entity_description.value_fn(self.coordinator.data)
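The value_fn field lets one generic entity class back all three sensors: each description extracts its own value from the shared coordinator data. A self-contained sketch of that description-driven pattern with plain dataclasses (no Home Assistant imports; all names illustrative):

from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime


@dataclass(frozen=True, kw_only=True)
class Description:
    key: str
    value_fn: Callable[["Data"], str | datetime | None]


@dataclass
class Data:
    state: str
    last_backup: datetime | None


DESCRIPTIONS = (
    Description(key="state", value_fn=lambda d: d.state),
    Description(key="last_backup", value_fn=lambda d: d.last_backup),
)

data = Data(state="idle", last_backup=None)
for description in DESCRIPTIONS:
    # Each "sensor" derives its value from the shared coordinator data.
    print(description.key, description.value_fn(data))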
homeassistant/components/backup/strings.json
@@ -22,5 +22,24 @@
       "name": "Create automatic backup",
       "description": "Creates a new backup with automatic backup settings."
     }
+  },
+  "entity": {
+    "sensor": {
+      "backup_manager_state": {
+        "name": "Backup Manager State",
+        "state": {
+          "idle": "Idle",
+          "create_backup": "Creating a backup",
+          "receive_backup": "Receiving a backup",
+          "restore_backup": "Restoring a backup"
+        }
+      },
+      "next_scheduled_automatic_backup": {
+        "name": "Next scheduled automatic backup"
+      },
+      "last_successful_automatic_backup": {
+        "name": "Last successful automatic backup"
+      }
+    }
   }
 }
homeassistant/components/backup/websocket.py
@@ -55,7 +55,7 @@ async def handle_info(
         "backups": list(backups.values()),
         "last_attempted_automatic_backup": manager.config.data.last_attempted_automatic_backup,
         "last_completed_automatic_backup": manager.config.data.last_completed_automatic_backup,
-        "last_non_idle_event": manager.last_non_idle_event,
+        "last_action_event": manager.last_action_event,
         "next_automatic_backup": manager.config.data.schedule.next_automatic_backup,
         "next_automatic_backup_additional": manager.config.data.schedule.next_automatic_backup_additional,
         "state": manager.state,
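For WebSocket API consumers this is a breaking rename: backup/info now returns last_action_event instead of last_non_idle_event. A hedged sketch of a client-side shim that tolerates both keys across core versions:

def last_action_event(info: dict) -> dict | None:
    """Client-side shim: prefer the 2025.4+ key, fall back to the old one."""
    if "last_action_event" in info:
        return info["last_action_event"]
    return info.get("last_non_idle_event")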
homeassistant/components/baf/strings.json
@@ -23,7 +23,7 @@
   "entity": {
     "climate": {
       "auto_comfort": {
-        "name": "Auto comfort"
+        "name": "Auto Comfort"
       }
     },
     "fan": {
@@ -39,25 +39,25 @@
     },
     "number": {
       "comfort_min_speed": {
-        "name": "Auto Comfort Minimum Speed"
+        "name": "Auto Comfort minimum speed"
       },
       "comfort_max_speed": {
-        "name": "Auto Comfort Maximum Speed"
+        "name": "Auto Comfort maximum speed"
       },
       "comfort_heat_assist_speed": {
-        "name": "Auto Comfort Heat Assist Speed"
+        "name": "Auto Comfort Heat Assist speed"
      },
       "return_to_auto_timeout": {
-        "name": "Return to Auto Timeout"
+        "name": "Return to Auto timeout"
      },
       "motion_sense_timeout": {
-        "name": "Motion Sense Timeout"
+        "name": "Motion sense timeout"
      },
       "light_return_to_auto_timeout": {
-        "name": "Light Return to Auto Timeout"
+        "name": "Light return to Auto timeout"
      },
       "light_auto_motion_timeout": {
-        "name": "Light Motion Sense Timeout"
+        "name": "Light motion sense timeout"
      }
     },
     "sensor": {
@@ -76,10 +76,10 @@
     },
     "switch": {
       "legacy_ir_remote_enable": {
-        "name": "Legacy IR Remote"
+        "name": "Legacy IR remote"
       },
       "led_indicators_enable": {
-        "name": "Led Indicators"
+        "name": "LED indicators"
       },
       "comfort_heat_assist_enable": {
         "name": "Auto Comfort Heat Assist"
@@ -88,10 +88,10 @@
         "name": "Beep"
       },
       "eco_enable": {
-        "name": "Eco Mode"
+        "name": "Eco mode"
       },
       "motion_sense_enable": {
-        "name": "Motion Sense"
+        "name": "Motion sense"
       },
       "return_to_auto_enable": {
         "name": "Return to Auto"
@@ -103,7 +103,7 @@
         "name": "Dim to Warm"
       },
       "light_return_to_auto_enable": {
-        "name": "Light Return to Auto"
+        "name": "Light return to Auto"
       }
     }
   }
 }
homeassistant/components/balboa/__init__.py
@@ -21,6 +21,7 @@ _LOGGER = logging.getLogger(__name__)
 PLATFORMS = [
     Platform.BINARY_SENSOR,
     Platform.CLIMATE,
+    Platform.EVENT,
     Platform.FAN,
     Platform.LIGHT,
     Platform.SELECT,
@@ -28,7 +29,6 @@ PLATFORMS = [
     Platform.TIME,
 ]

-
 KEEP_ALIVE_INTERVAL = timedelta(minutes=1)
 SYNC_TIME_INTERVAL = timedelta(hours=1)

homeassistant/components/balboa/event.py (new file, 91 lines)
@@ -0,0 +1,91 @@
+"""Support for Balboa events."""
+
+from __future__ import annotations
+
+from datetime import datetime, timedelta
+
+from pybalboa import EVENT_UPDATE, SpaClient
+
+from homeassistant.components.event import EventEntity
+from homeassistant.core import HomeAssistant, callback
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
+from homeassistant.helpers.event import async_track_time_interval
+
+from . import BalboaConfigEntry
+from .entity import BalboaEntity
+
+FAULT = "fault"
+FAULT_DATE = "fault_date"
+REQUEST_FAULT_LOG_INTERVAL = timedelta(minutes=5)
+
+FAULT_MESSAGE_CODE_MAP: dict[int, str] = {
+    15: "sensor_out_of_sync",
+    16: "low_flow",
+    17: "flow_failed",
+    18: "settings_reset",
+    19: "priming_mode",
+    20: "clock_failed",
+    21: "settings_reset",
+    22: "memory_failure",
+    26: "service_sensor_sync",
+    27: "heater_dry",
+    28: "heater_may_be_dry",
+    29: "water_too_hot",
+    30: "heater_too_hot",
+    31: "sensor_a_fault",
+    32: "sensor_b_fault",
+    34: "pump_stuck",
+    35: "hot_fault",
+    36: "gfci_test_failed",
+    37: "standby_mode",
+}
+FAULT_EVENT_TYPES = sorted(set(FAULT_MESSAGE_CODE_MAP.values()))
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    entry: BalboaConfigEntry,
+    async_add_entities: AddConfigEntryEntitiesCallback,
+) -> None:
+    """Set up the spa's events."""
+    async_add_entities([BalboaEventEntity(entry.runtime_data)])
+
+
+class BalboaEventEntity(BalboaEntity, EventEntity):
+    """Representation of a Balboa event entity."""
+
+    _attr_event_types = FAULT_EVENT_TYPES
+    _attr_translation_key = FAULT
+
+    def __init__(self, spa: SpaClient) -> None:
+        """Initialize a Balboa event entity."""
+        super().__init__(spa, FAULT)
+
+    @callback
+    def _async_handle_event(self) -> None:
+        """Handle the fault event."""
+        if not (fault := self._client.fault):
+            return
+        fault_date = fault.fault_datetime.isoformat()
+        if self.state_attributes.get(FAULT_DATE) != fault_date:
+            self._trigger_event(
+                FAULT_MESSAGE_CODE_MAP.get(fault.message_code, fault.message),
+                {FAULT_DATE: fault_date, "code": fault.message_code},
+            )
+            self.async_write_ha_state()
+
+    async def async_added_to_hass(self) -> None:
+        """Run when entity about to be added to hass."""
+        await super().async_added_to_hass()
+        self.async_on_remove(self._client.on(EVENT_UPDATE, self._async_handle_event))
+
+        async def request_fault_log(now: datetime | None = None) -> None:
+            """Request the most recent fault log."""
+            await self._client.request_fault_log()
+
+        await request_fault_log()
+        self.async_on_remove(
+            async_track_time_interval(
+                self.hass, request_fault_log, REQUEST_FAULT_LOG_INTERVAL
+            )
+        )
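The entity fires only when the fault timestamp changes, so a spa that keeps reporting the same stored fault does not re-trigger automations on every update. A self-contained sketch of that dedupe check (a subset of the code map; all names illustrative):

from datetime import datetime

FAULT_MESSAGE_CODE_MAP = {16: "low_flow"}  # subset, for illustration


class FaultDeduper:
    """Mirrors the fault_date comparison in BalboaEventEntity."""

    def __init__(self) -> None:
        self._last_fault_date: str | None = None

    def handle(self, message_code: int, fault_datetime: datetime) -> str | None:
        fault_date = fault_datetime.isoformat()
        if self._last_fault_date == fault_date:
            return None  # same stored fault -> do not re-fire the event
        self._last_fault_date = fault_date
        return FAULT_MESSAGE_CODE_MAP.get(message_code, "unknown")


deduper = FaultDeduper()
print(deduper.handle(16, datetime(2025, 3, 1)))  # low_flow
print(deduper.handle(16, datetime(2025, 3, 1)))  # None (duplicate)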
homeassistant/components/balboa/strings.json
@@ -57,6 +57,35 @@
         }
       }
     },
+    "event": {
+      "fault": {
+        "name": "Fault",
+        "state_attributes": {
+          "event_type": {
+            "state": {
+              "sensor_out_of_sync": "Sensors are out of sync",
+              "low_flow": "The water flow is low",
+              "flow_failed": "The water flow has failed",
+              "settings_reset": "The settings have been reset",
+              "priming_mode": "Priming mode",
+              "clock_failed": "The clock has failed",
+              "memory_failure": "Program memory failure",
+              "service_sensor_sync": "Sensors are out of sync -- call for service",
+              "heater_dry": "The heater is dry",
+              "heater_may_be_dry": "The heater may be dry",
+              "water_too_hot": "The water is too hot",
+              "heater_too_hot": "The heater is too hot",
+              "sensor_a_fault": "Sensor A fault",
+              "sensor_b_fault": "Sensor B fault",
+              "pump_stuck": "A pump may be stuck on",
+              "hot_fault": "Hot fault",
+              "gfci_test_failed": "The GFCI test failed",
+              "standby_mode": "Standby mode (hold mode)"
+            }
+          }
+        }
+      }
+    },
     "fan": {
       "pump": {
         "name": "Pump {index}"
homeassistant/components/bang_olufsen/strings.json
@@ -29,7 +29,7 @@
       "description": "Manually configure your Bang & Olufsen device."
     },
     "zeroconf_confirm": {
-      "title": "Setup Bang & Olufsen device",
+      "title": "Set up Bang & Olufsen device",
       "description": "Confirm the configuration of the {model}-{serial_number} @ {host}."
     }
   }
@@ -197,11 +197,11 @@
   "services": {
     "beolink_allstandby": {
       "name": "Beolink all standby",
-      "description": "Set all Connected Beolink devices to standby."
+      "description": "Sets all connected Beolink devices to standby."
     },
     "beolink_expand": {
       "name": "Beolink expand",
-      "description": "Expand current Beolink experience.",
+      "description": "Adds devices to the current Beolink experience.",
       "fields": {
         "all_discovered": {
           "name": "All discovered",
@@ -221,7 +221,7 @@
     },
     "beolink_join": {
       "name": "Beolink join",
-      "description": "Join a Beolink experience.",
+      "description": "Joins a Beolink experience.",
       "fields": {
         "beolink_jid": {
           "name": "Beolink JID",
@@ -241,11 +241,11 @@
     },
     "beolink_leave": {
       "name": "Beolink leave",
-      "description": "Leave a Beolink experience."
+      "description": "Leaves a Beolink experience."
     },
     "beolink_unexpand": {
      "name": "Beolink unexpand",
-      "description": "Unexpand from current Beolink experience.",
+      "description": "Removes devices from the current Beolink experience.",
       "fields": {
         "beolink_jids": {
           "name": "Beolink JIDs",
@@ -274,7 +274,7 @@
       "message": "An error occurred while attempting to play {media_type}: {error_message}."
     },
     "invalid_grouping_entity": {
-      "message": "Entity with id: {entity_id} can't be added to the Beolink session. Is the entity a Bang & Olufsen media_player?"
+      "message": "Entity with ID {entity_id} can't be added to the Beolink session. Is the entity a Bang & Olufsen media_player?"
     },
     "invalid_sound_mode": {
       "message": "{invalid_sound_mode} is an invalid sound mode. Valid values are: {valid_sound_modes}."
homeassistant/components/bluesound/media_player.py
@@ -501,18 +501,16 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
             return

         # presets and inputs might have the same name; presets have priority
-        url: str | None = None
         for input_ in self._inputs:
             if input_.text == source:
-                url = input_.url
+                await self._player.play_url(input_.url)
+                return
         for preset in self._presets:
             if preset.name == source:
-                url = preset.url
+                await self._player.load_preset(preset.id)
+                return

-        if url is None:
-            raise ServiceValidationError(f"Source {source} not found")
-
-        await self._player.play_url(url)
+        raise ServiceValidationError(f"Source {source} not found")

     async def async_clear_playlist(self) -> None:
         """Clear players playlist."""
homeassistant/components/bluetooth/__init__.py
@@ -311,11 +311,24 @@ async def async_update_device(
     update the device with the new location so they can
     figure out where the adapter is.
     """
+    address = details[ADAPTER_ADDRESS]
+    connections = {(dr.CONNECTION_BLUETOOTH, address)}
     device_registry = dr.async_get(hass)
+    # We only have one device for the config entry
+    # so if the address has been corrected, make
+    # sure the device entry reflects the correct
+    # address
+    for device in dr.async_entries_for_config_entry(device_registry, entry.entry_id):
+        for conn_type, conn_value in device.connections:
+            if conn_type == dr.CONNECTION_BLUETOOTH and conn_value != address:
+                device_registry.async_update_device(
+                    device.id, new_connections=connections
+                )
+                break
     device_entry = device_registry.async_get_or_create(
         config_entry_id=entry.entry_id,
-        name=adapter_human_name(adapter, details[ADAPTER_ADDRESS]),
-        connections={(dr.CONNECTION_BLUETOOTH, details[ADAPTER_ADDRESS])},
+        name=adapter_human_name(adapter, address),
+        connections=connections,
         manufacturer=details[ADAPTER_MANUFACTURER],
         model=adapter_model(details),
         sw_version=details.get(ADAPTER_SW_VERSION),
@@ -342,9 +355,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
             )
         )
     )
+        return True
     address = entry.unique_id
     assert address is not None
-    assert source_entry is not None
     source_domain = entry.data[CONF_SOURCE_DOMAIN]
     if mac_manufacturer := await get_manufacturer_from_mac(address):
         manufacturer = f"{mac_manufacturer} ({source_domain})"
homeassistant/components/bluetooth/config_flow.py
@@ -186,16 +186,28 @@ class BluetoothConfigFlow(ConfigFlow, domain=DOMAIN):
         """Handle a flow initialized by an external scanner."""
         source = user_input[CONF_SOURCE]
         await self.async_set_unique_id(source)
+        source_config_entry_id = user_input[CONF_SOURCE_CONFIG_ENTRY_ID]
         data = {
             CONF_SOURCE: source,
             CONF_SOURCE_MODEL: user_input[CONF_SOURCE_MODEL],
             CONF_SOURCE_DOMAIN: user_input[CONF_SOURCE_DOMAIN],
-            CONF_SOURCE_CONFIG_ENTRY_ID: user_input[CONF_SOURCE_CONFIG_ENTRY_ID],
+            CONF_SOURCE_CONFIG_ENTRY_ID: source_config_entry_id,
             CONF_SOURCE_DEVICE_ID: user_input[CONF_SOURCE_DEVICE_ID],
         }
         self._abort_if_unique_id_configured(updates=data)
-        manager = get_manager()
-        scanner = manager.async_scanner_by_source(source)
+        for entry in self._async_current_entries(include_ignore=False):
+            # If the mac address needs to be corrected, migrate
+            # the config entry to the new mac address
+            if (
+                entry.data.get(CONF_SOURCE_CONFIG_ENTRY_ID) == source_config_entry_id
+                and entry.unique_id != source
+            ):
+                self.hass.config_entries.async_update_entry(
+                    entry, unique_id=source, data={**entry.data, **data}
+                )
+                self.hass.config_entries.async_schedule_reload(entry.entry_id)
+                return self.async_abort(reason="already_configured")
+        scanner = get_manager().async_scanner_by_source(source)
         assert scanner is not None
         return self.async_create_entry(title=scanner.name, data=data)

homeassistant/components/bluetooth/manifest.json
@@ -18,9 +18,9 @@
     "bleak==0.22.3",
     "bleak-retry-connector==3.9.0",
     "bluetooth-adapters==0.21.4",
-    "bluetooth-auto-recovery==1.4.4",
-    "bluetooth-data-tools==1.23.4",
-    "dbus-fast==2.33.0",
-    "habluetooth==3.24.1"
+    "bluetooth-auto-recovery==1.4.5",
+    "bluetooth-data-tools==1.26.5",
+    "dbus-fast==2.43.0",
+    "habluetooth==3.37.0"
   ]
 }
homeassistant/components/bosch_alarm/__init__.py (new file, 62 lines)
@@ -0,0 +1,62 @@
+"""The Bosch Alarm integration."""
+
+from __future__ import annotations
+
+from ssl import SSLError
+
+from bosch_alarm_mode2 import Panel
+
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, Platform
+from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import ConfigEntryNotReady
+from homeassistant.helpers import device_registry as dr
+
+from .const import CONF_INSTALLER_CODE, CONF_USER_CODE, DOMAIN
+
+PLATFORMS: list[Platform] = [Platform.ALARM_CONTROL_PANEL]
+
+type BoschAlarmConfigEntry = ConfigEntry[Panel]
+
+
+async def async_setup_entry(hass: HomeAssistant, entry: BoschAlarmConfigEntry) -> bool:
+    """Set up Bosch Alarm from a config entry."""
+
+    panel = Panel(
+        host=entry.data[CONF_HOST],
+        port=entry.data[CONF_PORT],
+        automation_code=entry.data.get(CONF_PASSWORD),
+        installer_or_user_code=entry.data.get(
+            CONF_INSTALLER_CODE, entry.data.get(CONF_USER_CODE)
+        ),
+    )
+    try:
+        await panel.connect()
+    except (PermissionError, ValueError) as err:
+        await panel.disconnect()
+        raise ConfigEntryNotReady from err
+    except (TimeoutError, OSError, ConnectionRefusedError, SSLError) as err:
+        await panel.disconnect()
+        raise ConfigEntryNotReady("Connection failed") from err
+
+    entry.runtime_data = panel
+
+    device_registry = dr.async_get(hass)
+
+    device_registry.async_get_or_create(
+        config_entry_id=entry.entry_id,
+        identifiers={(DOMAIN, entry.unique_id or entry.entry_id)},
+        name=f"Bosch {panel.model}",
+        manufacturer="Bosch Security Systems",
+        model=panel.model,
+        sw_version=panel.firmware_version,
+    )
+    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
+    return True
+
+
+async def async_unload_entry(hass: HomeAssistant, entry: BoschAlarmConfigEntry) -> bool:
+    """Unload a config entry."""
+    if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
+        await entry.runtime_data.disconnect()
+    return unload_ok
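The type BoschAlarmConfigEntry = ConfigEntry[Panel] alias is the runtime_data pattern: the connected client lives on the config entry itself rather than in hass.data, and platforms get typed access to it. A minimal sketch of the same idea with plain generics (no Home Assistant imports; Entry and the Panel stub are illustrative):

from dataclasses import dataclass
from typing import Generic, TypeVar

T = TypeVar("T")


@dataclass
class Entry(Generic[T]):
    """Stand-in for ConfigEntry[T]: runtime_data is typed per integration."""

    entry_id: str
    runtime_data: T | None = None


class Panel:  # stand-in for bosch_alarm_mode2.Panel
    model = "Solution 3000"


entry: Entry[Panel] = Entry(entry_id="abc123")
entry.runtime_data = Panel()  # set during async_setup_entry
assert entry.runtime_data.model == "Solution 3000"  # typed access in platforms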
homeassistant/components/bosch_alarm/alarm_control_panel.py (new file, 109 lines)
@@ -0,0 +1,109 @@
+"""Support for Bosch Alarm Panel."""
+
+from __future__ import annotations
+
+from bosch_alarm_mode2 import Panel
+
+from homeassistant.components.alarm_control_panel import (
+    AlarmControlPanelEntity,
+    AlarmControlPanelEntityFeature,
+    AlarmControlPanelState,
+)
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.device_registry import DeviceInfo
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
+
+from . import BoschAlarmConfigEntry
+from .const import DOMAIN
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    config_entry: BoschAlarmConfigEntry,
+    async_add_entities: AddConfigEntryEntitiesCallback,
+) -> None:
+    """Set up control panels for each area."""
+    panel = config_entry.runtime_data
+
+    async_add_entities(
+        AreaAlarmControlPanel(
+            panel,
+            area_id,
+            config_entry.unique_id or config_entry.entry_id,
+        )
+        for area_id in panel.areas
+    )
+
+
+class AreaAlarmControlPanel(AlarmControlPanelEntity):
+    """An alarm control panel entity for a bosch alarm panel."""
+
+    _attr_has_entity_name = True
+    _attr_supported_features = (
+        AlarmControlPanelEntityFeature.ARM_HOME
+        | AlarmControlPanelEntityFeature.ARM_AWAY
+    )
+    _attr_code_arm_required = False
+    _attr_name = None
+
+    def __init__(self, panel: Panel, area_id: int, unique_id: str) -> None:
+        """Initialise a Bosch Alarm control panel entity."""
+        self.panel = panel
+        self._area = panel.areas[area_id]
+        self._area_id = area_id
+        self._attr_unique_id = f"{unique_id}_area_{area_id}"
+        self._attr_device_info = DeviceInfo(
+            identifiers={(DOMAIN, self._attr_unique_id)},
+            name=self._area.name,
+            manufacturer="Bosch Security Systems",
+            via_device=(
+                DOMAIN,
+                unique_id,
+            ),
+        )
+
+    @property
+    def alarm_state(self) -> AlarmControlPanelState | None:
+        """Return the state of the alarm."""
+        if self._area.is_triggered():
+            return AlarmControlPanelState.TRIGGERED
+        if self._area.is_disarmed():
+            return AlarmControlPanelState.DISARMED
+        if self._area.is_arming():
+            return AlarmControlPanelState.ARMING
+        if self._area.is_pending():
+            return AlarmControlPanelState.PENDING
+        if self._area.is_part_armed():
+            return AlarmControlPanelState.ARMED_HOME
+        if self._area.is_all_armed():
+            return AlarmControlPanelState.ARMED_AWAY
+        return None
+
+    async def async_alarm_disarm(self, code: str | None = None) -> None:
+        """Disarm this panel."""
+        await self.panel.area_disarm(self._area_id)
+
+    async def async_alarm_arm_home(self, code: str | None = None) -> None:
+        """Send arm home command."""
+        await self.panel.area_arm_part(self._area_id)
+
+    async def async_alarm_arm_away(self, code: str | None = None) -> None:
+        """Send arm away command."""
+        await self.panel.area_arm_all(self._area_id)
+
+    @property
+    def available(self) -> bool:
+        """Return True if entity is available."""
+        return self.panel.connection_status()
+
+    async def async_added_to_hass(self) -> None:
+        """Run when entity attached to hass."""
+        await super().async_added_to_hass()
+        self._area.status_observer.attach(self.schedule_update_ha_state)
+        self.panel.connection_status_observer.attach(self.schedule_update_ha_state)
+
+    async def async_will_remove_from_hass(self) -> None:
+        """Run when entity removed from hass."""
+        await super().async_will_remove_from_hass()
+        self._area.status_observer.detach(self.schedule_update_ha_state)
+        self.panel.connection_status_observer.detach(self.schedule_update_ha_state)
homeassistant/components/bosch_alarm/config_flow.py (new file, 165 lines)
@@ -0,0 +1,165 @@
+"""Config flow for Bosch Alarm integration."""
+
+from __future__ import annotations
+
+import asyncio
+import logging
+import ssl
+from typing import Any
+
+from bosch_alarm_mode2 import Panel
+import voluptuous as vol
+
+from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
+from homeassistant.const import (
+    CONF_CODE,
+    CONF_HOST,
+    CONF_MODEL,
+    CONF_PASSWORD,
+    CONF_PORT,
+)
+import homeassistant.helpers.config_validation as cv
+
+from .const import CONF_INSTALLER_CODE, CONF_USER_CODE, DOMAIN
+
+_LOGGER = logging.getLogger(__name__)
+
+STEP_USER_DATA_SCHEMA = vol.Schema(
+    {
+        vol.Required(CONF_HOST): str,
+        vol.Required(CONF_PORT, default=7700): cv.positive_int,
+    }
+)
+
+STEP_AUTH_DATA_SCHEMA_SOLUTION = vol.Schema(
+    {
+        vol.Required(CONF_USER_CODE): str,
+    }
+)
+
+STEP_AUTH_DATA_SCHEMA_AMAX = vol.Schema(
+    {
+        vol.Required(CONF_INSTALLER_CODE): str,
+        vol.Required(CONF_PASSWORD): str,
+    }
+)
+
+STEP_AUTH_DATA_SCHEMA_BG = vol.Schema(
+    {
+        vol.Required(CONF_PASSWORD): str,
+    }
+)
+
+STEP_INIT_DATA_SCHEMA = vol.Schema({vol.Optional(CONF_CODE): str})
+
+
+async def try_connect(
+    data: dict[str, Any], load_selector: int = 0
+) -> tuple[str, int | None]:
+    """Validate the user input allows us to connect.
+
+    Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user.
+    """
+    panel = Panel(
+        host=data[CONF_HOST],
+        port=data[CONF_PORT],
+        automation_code=data.get(CONF_PASSWORD),
+        installer_or_user_code=data.get(CONF_INSTALLER_CODE, data.get(CONF_USER_CODE)),
+    )
+
+    try:
+        await panel.connect(load_selector)
+    finally:
+        await panel.disconnect()
+
+    return (panel.model, panel.serial_number)
+
+
+class BoschAlarmConfigFlow(ConfigFlow, domain=DOMAIN):
+    """Handle a config flow for Bosch Alarm."""
+
+    def __init__(self) -> None:
+        """Init config flow."""
+
+        self._data: dict[str, Any] = {}
+
+    async def async_step_user(
+        self, user_input: dict[str, Any] | None = None
+    ) -> ConfigFlowResult:
+        """Handle the initial step."""
+        errors: dict[str, str] = {}
+
+        if user_input is not None:
+            try:
+                # Use load_selector = 0 to fetch the panel model without authentication.
+                (model, serial) = await try_connect(user_input, 0)
+            except (
+                OSError,
+                ConnectionRefusedError,
+                ssl.SSLError,
+                asyncio.exceptions.TimeoutError,
+            ) as e:
+                _LOGGER.error("Connection Error: %s", e)
+                errors["base"] = "cannot_connect"
+            except Exception:
+                _LOGGER.exception("Unexpected exception")
+                errors["base"] = "unknown"
+            else:
+                self._data = user_input
+                self._data[CONF_MODEL] = model
+                return await self.async_step_auth()
+        return self.async_show_form(
+            step_id="user",
+            data_schema=self.add_suggested_values_to_schema(
+                STEP_USER_DATA_SCHEMA, user_input
+            ),
+            errors=errors,
+        )
+
+    async def async_step_auth(
+        self, user_input: dict[str, Any] | None = None
+    ) -> ConfigFlowResult:
+        """Handle the auth step."""
+        errors: dict[str, str] = {}
+
+        # Each model variant requires a different authentication flow
+        if "Solution" in self._data[CONF_MODEL]:
+            schema = STEP_AUTH_DATA_SCHEMA_SOLUTION
+        elif "AMAX" in self._data[CONF_MODEL]:
+            schema = STEP_AUTH_DATA_SCHEMA_AMAX
+        else:
+            schema = STEP_AUTH_DATA_SCHEMA_BG
+
+        if user_input is not None:
+            self._data.update(user_input)
+            try:
+                (model, serial_number) = await try_connect(
+                    self._data, Panel.LOAD_EXTENDED_INFO
+                )
+            except (PermissionError, ValueError) as e:
+                errors["base"] = "invalid_auth"
+                _LOGGER.error("Authentication Error: %s", e)
+            except (
+                OSError,
+                ConnectionRefusedError,
+                ssl.SSLError,
+                TimeoutError,
+            ) as e:
+                _LOGGER.error("Connection Error: %s", e)
+                errors["base"] = "cannot_connect"
+            except Exception:
+                _LOGGER.exception("Unexpected exception")
+                errors["base"] = "unknown"
+            else:
+                if serial_number:
+                    await self.async_set_unique_id(str(serial_number))
+                    self._abort_if_unique_id_configured()
+                else:
+                    self._async_abort_entries_match({CONF_HOST: self._data[CONF_HOST]})
+                return self.async_create_entry(title=f"Bosch {model}", data=self._data)
+
+        return self.async_show_form(
+            step_id="auth",
+            data_schema=self.add_suggested_values_to_schema(schema, user_input),
+            errors=errors,
+        )
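Because the auth schema is picked by substring-matching the reported model, the dispatch is easy to sanity-check in isolation. A hedged sketch mirroring async_step_auth (model strings are examples only):

def auth_schema_for(model: str) -> str:
    # Mirrors the dispatch in BoschAlarmConfigFlow.async_step_auth.
    if "Solution" in model:
        return "SOLUTION"  # user code only
    if "AMAX" in model:
        return "AMAX"  # installer code + automation password
    return "B/G"  # automation password only


print(auth_schema_for("Solution 2000"))  # SOLUTION
print(auth_schema_for("AMAX 3000"))      # AMAX
print(auth_schema_for("B5512"))          # B/G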
homeassistant/components/bosch_alarm/const.py (new file, 6 lines)
@@ -0,0 +1,6 @@
+"""Constants for the Bosch Alarm integration."""
+
+DOMAIN = "bosch_alarm"
+HISTORY_ATTR = "history"
+CONF_INSTALLER_CODE = "installer_code"
+CONF_USER_CODE = "user_code"
homeassistant/components/bosch_alarm/manifest.json (new file, 11 lines)
@@ -0,0 +1,11 @@
+{
+  "domain": "bosch_alarm",
+  "name": "Bosch Alarm",
+  "codeowners": ["@mag1024", "@sanjay900"],
+  "config_flow": true,
+  "documentation": "https://www.home-assistant.io/integrations/bosch_alarm",
+  "integration_type": "device",
+  "iot_class": "local_push",
+  "quality_scale": "bronze",
+  "requirements": ["bosch-alarm-mode2==0.4.3"]
+}