Mirror of https://github.com/home-assistant/core.git
Synced 2025-11-08 10:29:27 +00:00

Compare commits: feedreader...manual_tri (18 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 635669278c | |
| | 1c5eb92c9c | |
| | 3337dd4ed7 | |
| | f1d21685e6 | |
| | 73f27549e4 | |
| | 1882b914dc | |
| | 06f99dc9ba | |
| | 2e2c718d94 | |
| | b8f56a6ed6 | |
| | db37dbec03 | |
| | 579f44468e | |
| | d452e957c9 | |
| | 5f9bcd583b | |
| | c0c508c7a2 | |
| | 13f5adfa84 | |
| | a07a3a61bf | |
| | 848162debd | |
| | 07cd669bc1 | |
.github/ISSUE_TEMPLATE/bug_report.yml (vendored) — 1 change

```diff
@@ -1,6 +1,5 @@
 name: Report an issue with Home Assistant Core
 description: Report an issue with Home Assistant Core.
-type: Bug
 body:
   - type: markdown
     attributes:
```
.github/workflows/builder.yml (vendored) — 28 changes

```diff
@@ -32,7 +32,7 @@ jobs:
           fetch-depth: 0

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.5.0
+        uses: actions/setup-python@v5.4.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}

@@ -69,7 +69,7 @@ jobs:
         run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -

       - name: Upload translations
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: translations
           path: translations.tar.gz
@@ -116,7 +116,7 @@ jobs:

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         if: needs.init.outputs.channel == 'dev'
-        uses: actions/setup-python@v5.5.0
+        uses: actions/setup-python@v5.4.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}

@@ -175,7 +175,7 @@ jobs:
           sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt

       - name: Download translations
-        uses: actions/download-artifact@v4.2.1
+        uses: actions/download-artifact@v4.1.9
         with:
           name: translations

@@ -190,14 +190,14 @@ jobs:
           echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE

       - name: Login to GitHub Container Registry
-        uses: docker/login-action@v3.4.0
+        uses: docker/login-action@v3.3.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}

       - name: Build base image
-        uses: home-assistant/builder@2025.03.0
+        uses: home-assistant/builder@2025.02.0
         with:
           args: |
             $BUILD_ARGS \
@@ -256,14 +256,14 @@ jobs:
           fi

       - name: Login to GitHub Container Registry
-        uses: docker/login-action@v3.4.0
+        uses: docker/login-action@v3.3.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}

       - name: Build base image
-        uses: home-assistant/builder@2025.03.0
+        uses: home-assistant/builder@2025.02.0
         with:
           args: |
             $BUILD_ARGS \
@@ -330,14 +330,14 @@ jobs:

       - name: Login to DockerHub
         if: matrix.registry == 'docker.io/homeassistant'
-        uses: docker/login-action@v3.4.0
+        uses: docker/login-action@v3.3.0
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

       - name: Login to GitHub Container Registry
         if: matrix.registry == 'ghcr.io/home-assistant'
-        uses: docker/login-action@v3.4.0
+        uses: docker/login-action@v3.3.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
@@ -457,12 +457,12 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.5.0
+        uses: actions/setup-python@v5.4.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}

       - name: Download translations
-        uses: actions/download-artifact@v4.2.1
+        uses: actions/download-artifact@v4.1.9
         with:
           name: translations

@@ -502,7 +502,7 @@ jobs:
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

       - name: Login to GitHub Container Registry
-        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
@@ -531,7 +531,7 @@ jobs:

       - name: Generate artifact attestation
         if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
-        uses: actions/attest-build-provenance@c074443f1aee8d4aeeae555aebba3282517141b2 # v2.2.3
+        uses: actions/attest-build-provenance@bd77c077858b8d561b7a36cbe48ef4cc642ca39d # v2.2.2
        with:
           subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
           subject-digest: ${{ steps.push.outputs.digest }}
```
.github/workflows/ci.yaml (vendored) — 231 changes

```diff
@@ -37,10 +37,10 @@ on:
         type: boolean

 env:
-  CACHE_VERSION: 12
+  CACHE_VERSION: 11
   UV_CACHE_VERSION: 1
   MYPY_CACHE_VERSION: 9
-  HA_SHORT_VERSION: "2025.5"
+  HA_SHORT_VERSION: "2025.4"
   DEFAULT_PYTHON: "3.13"
   ALL_PYTHON_VERSIONS: "['3.13']"
   # 10.3 is the oldest supported version
@@ -89,7 +89,6 @@ jobs:
       test_groups: ${{ steps.info.outputs.test_groups }}
       tests_glob: ${{ steps.info.outputs.tests_glob }}
       tests: ${{ steps.info.outputs.tests }}
-      lint_only: ${{ steps.info.outputs.lint_only }}
       skip_coverage: ${{ steps.info.outputs.skip_coverage }}
     runs-on: ubuntu-24.04
     steps:
@@ -143,7 +142,6 @@ jobs:
           test_group_count=10
           tests="[]"
           tests_glob=""
-          lint_only=""
           skip_coverage=""

           if [[ "${{ steps.integrations.outputs.changes }}" != "[]" ]];
@@ -194,17 +192,6 @@ jobs:
             test_full_suite="true"
           fi

-          if [[ "${{ github.event.inputs.lint-only }}" == "true" ]] \
-            || [[ "${{ github.event.inputs.pylint-only }}" == "true" ]] \
-            || [[ "${{ github.event.inputs.mypy-only }}" == "true" ]] \
-            || [[ "${{ github.event.inputs.audit-licenses-only }}" == "true" ]] \
-            || [[ "${{ github.event_name }}" == "push" \
-              && "${{ github.event.repository.full_name }}" != "home-assistant/core" ]];
-          then
-            lint_only="true"
-            skip_coverage="true"
-          fi
-
           if [[ "${{ github.event.inputs.skip-coverage }}" == "true" ]] \
             || [[ "${{ contains(github.event.pull_request.labels.*.name, 'ci-skip-coverage') }}" == "true" ]];
           then
@@ -230,8 +217,6 @@ jobs:
           echo "tests=${tests}" >> $GITHUB_OUTPUT
           echo "tests_glob: ${tests_glob}"
           echo "tests_glob=${tests_glob}" >> $GITHUB_OUTPUT
-          echo "lint_only": ${lint_only}
-          echo "lint_only=${lint_only}" >> $GITHUB_OUTPUT
           echo "skip_coverage: ${skip_coverage}"
           echo "skip_coverage=${skip_coverage}" >> $GITHUB_OUTPUT
@@ -249,13 +234,13 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.5.0
+        uses: actions/setup-python@v5.4.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v4.2.2
         with:
           path: venv
           key: >-
@@ -271,7 +256,7 @@ jobs:
           uv pip install "$(cat requirements_test.txt | grep pre-commit)"
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v4.2.2
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           lookup-only: true
@@ -294,14 +279,14 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.5.0
+        uses: actions/setup-python@v5.4.0
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.3
+        uses: actions/cache/restore@v4.2.2
         with:
           path: venv
           fail-on-cache-miss: true
@@ -310,7 +295,7 @@ jobs:
           needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@v4.2.3
+        uses: actions/cache/restore@v4.2.2
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true
@@ -334,14 +319,14 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.5.0
+        uses: actions/setup-python@v5.4.0
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.3
+        uses: actions/cache/restore@v4.2.2
         with:
           path: venv
           fail-on-cache-miss: true
@@ -350,7 +335,7 @@ jobs:
           needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@v4.2.3
+        uses: actions/cache/restore@v4.2.2
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true
@@ -374,14 +359,14 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.5.0
+        uses: actions/setup-python@v5.4.0
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.3
+        uses: actions/cache/restore@v4.2.2
         with:
           path: venv
           fail-on-cache-miss: true
@@ -390,7 +375,7 @@ jobs:
           needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@v4.2.3
+        uses: actions/cache/restore@v4.2.2
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true
@@ -484,7 +469,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.5.0
+        uses: actions/setup-python@v5.4.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
@@ -497,7 +482,7 @@ jobs:
           env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v4.2.2
         with:
           path: venv
           key: >-
@@ -505,7 +490,7 @@ jobs:
           needs.info.outputs.python_cache_key }}
       - name: Restore uv wheel cache
         if: steps.cache-venv.outputs.cache-hit != 'true'
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v4.2.2
         with:
           path: ${{ env.UV_CACHE_DIR }}
           key: >-
@@ -552,7 +537,7 @@ jobs:
           python --version
           uv pip freeze >> pip_freeze.txt
       - name: Upload pip_freeze artifact
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: pip-freeze-${{ matrix.python-version }}
           path: pip_freeze.txt
@@ -587,13 +572,13 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.5.0
+        uses: actions/setup-python@v5.4.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.3
+        uses: actions/cache/restore@v4.2.2
         with:
           path: venv
           fail-on-cache-miss: true
@@ -620,13 +605,13 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.5.0
+        uses: actions/setup-python@v5.4.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.3
+        uses: actions/cache/restore@v4.2.2
         with:
           path: venv
           fail-on-cache-miss: true
@@ -638,25 +623,6 @@ jobs:
           . venv/bin/activate
           python -m script.gen_requirements_all validate

-  dependency-review:
-    name: Dependency review
-    runs-on: ubuntu-24.04
-    needs:
-      - info
-      - base
-    if: |
-      github.event.inputs.pylint-only != 'true'
-      && github.event.inputs.mypy-only != 'true'
-      && needs.info.outputs.requirements == 'true'
-      && github.event_name == 'pull_request'
-    steps:
-      - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
-      - name: Dependency review
-        uses: actions/dependency-review-action@v4.6.0
-        with:
-          license-check: false # We use our own license audit checks
-
   audit-licenses:
     name: Audit licenses
     runs-on: ubuntu-24.04
@@ -677,13 +643,13 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.5.0
+        uses: actions/setup-python@v5.4.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.3
+        uses: actions/cache/restore@v4.2.2
         with:
           path: venv
           fail-on-cache-miss: true
@@ -695,7 +661,7 @@ jobs:
           . venv/bin/activate
           python -m script.licenses extract --output-file=licenses-${{ matrix.python-version }}.json
       - name: Upload licenses
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: licenses-${{ github.run_number }}-${{ matrix.python-version }}
           path: licenses-${{ matrix.python-version }}.json
@@ -720,13 +686,13 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.5.0
+        uses: actions/setup-python@v5.4.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.3
+        uses: actions/cache/restore@v4.2.2
         with:
           path: venv
           fail-on-cache-miss: true
@@ -767,13 +733,13 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.5.0
+        uses: actions/setup-python@v5.4.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.3
+        uses: actions/cache/restore@v4.2.2
         with:
           path: venv
           fail-on-cache-miss: true
@@ -812,7 +778,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.5.0
+        uses: actions/setup-python@v5.4.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
@@ -825,7 +791,7 @@ jobs:
           env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.3
+        uses: actions/cache/restore@v4.2.2
         with:
           path: venv
           fail-on-cache-miss: true
@@ -833,7 +799,7 @@ jobs:
           ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
           needs.info.outputs.python_cache_key }}
       - name: Restore mypy cache
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v4.2.2
         with:
           path: .mypy_cache
           key: >-
@@ -863,7 +829,11 @@ jobs:
   prepare-pytest-full:
     runs-on: ubuntu-24.04
     if: |
-      needs.info.outputs.lint_only != 'true'
+      (github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
+      && github.event.inputs.lint-only != 'true'
+      && github.event.inputs.pylint-only != 'true'
+      && github.event.inputs.mypy-only != 'true'
+      && github.event.inputs.audit-licenses-only != 'true'
       && needs.info.outputs.test_full_suite == 'true'
     needs:
       - info
@@ -889,13 +859,13 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.5.0
+        uses: actions/setup-python@v5.4.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.3
+        uses: actions/cache/restore@v4.2.2
         with:
           path: venv
           fail-on-cache-miss: true
@@ -907,7 +877,7 @@ jobs:
           . venv/bin/activate
           python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests
       - name: Upload pytest_buckets
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: pytest_buckets
           path: pytest_buckets.txt
@@ -916,7 +886,11 @@ jobs:
   pytest-full:
     runs-on: ubuntu-24.04
     if: |
-      needs.info.outputs.lint_only != 'true'
+      (github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
+      && github.event.inputs.lint-only != 'true'
+      && github.event.inputs.pylint-only != 'true'
+      && github.event.inputs.mypy-only != 'true'
+      && github.event.inputs.audit-licenses-only != 'true'
       && needs.info.outputs.test_full_suite == 'true'
     needs:
       - info
@@ -949,13 +923,13 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.5.0
+        uses: actions/setup-python@v5.4.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.3
+        uses: actions/cache/restore@v4.2.2
         with:
           path: venv
           fail-on-cache-miss: true
@@ -968,7 +942,7 @@ jobs:
         run: |
           echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
       - name: Download pytest_buckets
-        uses: actions/download-artifact@v4.2.1
+        uses: actions/download-artifact@v4.1.9
         with:
           name: pytest_buckets
       - name: Compile English translations
@@ -988,7 +962,6 @@ jobs:
           if [[ "${{ needs.info.outputs.skip_coverage }}" != "true" ]]; then
             cov_params+=(--cov="homeassistant")
             cov_params+=(--cov-report=xml)
-            cov_params+=(--junitxml=junit.xml -o junit_family=legacy)
           fi

           echo "Test group ${{ matrix.group }}: $(sed -n "${{ matrix.group }},1p" pytest_buckets.txt)"
@@ -1007,24 +980,18 @@ jobs:
           2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-full.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
           path: pytest-*.txt
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
           path: coverage.xml
           overwrite: true
-      - name: Upload test results artifact
-        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
-        uses: actions/upload-artifact@v4.6.2
-        with:
-          name: test-results-full-${{ matrix.python-version }}-${{ matrix.group }}
-          path: junit.xml
       - name: Remove pytest_buckets
         run: rm pytest_buckets.txt
       - name: Check dirty
@@ -1042,7 +1009,11 @@ jobs:
         MYSQL_ROOT_PASSWORD: password
       options: --health-cmd="mysqladmin ping -uroot -ppassword" --health-interval=5s --health-timeout=2s --health-retries=3
     if: |
-      needs.info.outputs.lint_only != 'true'
+      (github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
+      && github.event.inputs.lint-only != 'true'
+      && github.event.inputs.pylint-only != 'true'
+      && github.event.inputs.mypy-only != 'true'
+      && github.event.inputs.audit-licenses-only != 'true'
       && needs.info.outputs.mariadb_groups != '[]'
     needs:
       - info
@@ -1074,13 +1045,13 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.5.0
+        uses: actions/setup-python@v5.4.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.3
+        uses: actions/cache/restore@v4.2.2
         with:
           path: venv
           fail-on-cache-miss: true
@@ -1117,7 +1088,6 @@ jobs:
             cov_params+=(--cov="homeassistant.components.recorder")
             cov_params+=(--cov-report=xml)
             cov_params+=(--cov-report=term-missing)
-            cov_params+=(--junitxml=junit.xml -o junit_family=legacy)
           fi

           python3 -b -X dev -m pytest \
@@ -1138,7 +1108,7 @@ jobs:
           2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.mariadb }}
@@ -1146,19 +1116,12 @@ jobs:
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: coverage-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.mariadb }}
           path: coverage.xml
           overwrite: true
-      - name: Upload test results artifact
-        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
-        uses: actions/upload-artifact@v4.6.2
-        with:
-          name: test-results-mariadb-${{ matrix.python-version }}-${{
-            steps.pytest-partial.outputs.mariadb }}
-          path: junit.xml
       - name: Check dirty
         run: |
           ./script/check_dirty
@@ -1174,7 +1137,11 @@ jobs:
         POSTGRES_PASSWORD: password
       options: --health-cmd="pg_isready -hlocalhost -Upostgres" --health-interval=5s --health-timeout=2s --health-retries=3
     if: |
-      needs.info.outputs.lint_only != 'true'
+      (github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
+      && github.event.inputs.lint-only != 'true'
+      && github.event.inputs.pylint-only != 'true'
+      && github.event.inputs.mypy-only != 'true'
+      && github.event.inputs.audit-licenses-only != 'true'
       && needs.info.outputs.postgresql_groups != '[]'
     needs:
       - info
@@ -1208,13 +1175,13 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.5.0
+        uses: actions/setup-python@v5.4.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.3
+        uses: actions/cache/restore@v4.2.2
         with:
           path: venv
           fail-on-cache-miss: true
@@ -1251,7 +1218,6 @@ jobs:
             cov_params+=(--cov="homeassistant.components.recorder")
             cov_params+=(--cov-report=xml)
             cov_params+=(--cov-report=term-missing)
-            cov_params+=(--junitxml=junit.xml -o junit_family=legacy)
           fi

           python3 -b -X dev -m pytest \
@@ -1273,7 +1239,7 @@ jobs:
           2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.postgresql }}
@@ -1281,19 +1247,12 @@ jobs:
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: coverage-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.postgresql }}
           path: coverage.xml
           overwrite: true
-      - name: Upload test results artifact
-        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
-        uses: actions/upload-artifact@v4.6.2
-        with:
-          name: test-results-postgres-${{ matrix.python-version }}-${{
-            steps.pytest-partial.outputs.postgresql }}
-          path: junit.xml
       - name: Check dirty
         run: |
           ./script/check_dirty
@@ -1312,12 +1271,12 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.2.1
+        uses: actions/download-artifact@v4.1.9
         with:
           pattern: coverage-*
       - name: Upload coverage to Codecov
         if: needs.info.outputs.test_full_suite == 'true'
-        uses: codecov/codecov-action@v5.4.2
+        uses: codecov/codecov-action@v5.4.0
         with:
           fail_ci_if_error: true
           flags: full-suite
@@ -1326,7 +1285,11 @@ jobs:
   pytest-partial:
     runs-on: ubuntu-24.04
     if: |
-      needs.info.outputs.lint_only != 'true'
+      (github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
+      && github.event.inputs.lint-only != 'true'
+      && github.event.inputs.pylint-only != 'true'
+      && github.event.inputs.mypy-only != 'true'
+      && github.event.inputs.audit-licenses-only != 'true'
       && needs.info.outputs.tests_glob
       && needs.info.outputs.test_full_suite == 'false'
     needs:
@@ -1359,13 +1322,13 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.5.0
+        uses: actions/setup-python@v5.4.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.3
+        uses: actions/cache/restore@v4.2.2
         with:
           path: venv
           fail-on-cache-miss: true
@@ -1402,7 +1365,6 @@ jobs:
             cov_params+=(--cov="homeassistant.components.${{ matrix.group }}")
             cov_params+=(--cov-report=xml)
             cov_params+=(--cov-report=term-missing)
-            cov_params+=(--junitxml=junit.xml -o junit_family=legacy)
           fi

           python3 -b -X dev -m pytest \
@@ -1420,24 +1382,18 @@ jobs:
           2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
           path: pytest-*.txt
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
           path: coverage.xml
           overwrite: true
-      - name: Upload test results artifact
-        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
-        uses: actions/upload-artifact@v4.6.2
-        with:
-          name: test-results-partial-${{ matrix.python-version }}-${{ matrix.group }}
-          path: junit.xml
       - name: Check dirty
         run: |
           ./script/check_dirty
@@ -1454,37 +1410,12 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.2.1
+        uses: actions/download-artifact@v4.1.9
         with:
           pattern: coverage-*
       - name: Upload coverage to Codecov
         if: needs.info.outputs.test_full_suite == 'false'
-        uses: codecov/codecov-action@v5.4.2
+        uses: codecov/codecov-action@v5.4.0
         with:
           fail_ci_if_error: true
           token: ${{ secrets.CODECOV_TOKEN }}
-
-  upload-test-results:
-    name: Upload test results to Codecov
-    # codecov/test-results-action currently doesn't support tokenless uploads
-    # therefore we can't run it on forks
-    if: ${{ (github.event_name != 'pull_request' || !github.event.pull_request.head.repo.fork) && needs.info.outputs.skip_coverage != 'true' && !cancelled() }}
-    runs-on: ubuntu-24.04
-    needs:
-      - info
-      - pytest-partial
-      - pytest-full
-      - pytest-postgres
-      - pytest-mariadb
-    timeout-minutes: 10
-    steps:
-      - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.2.1
-        with:
-          pattern: test-results-*
-      - name: Upload test results to Codecov
-        uses: codecov/test-results-action@v1
-        with:
-          fail_ci_if_error: true
-          verbose: true
-          token: ${{ secrets.CODECOV_TOKEN }}
```
.github/workflows/codeql.yml (vendored) — 4 changes

```diff
@@ -24,11 +24,11 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3.28.15
+        uses: github/codeql-action/init@v3.28.10
         with:
           languages: python

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3.28.15
+        uses: github/codeql-action/analyze@v3.28.10
         with:
           category: "/language:python"
```
.github/workflows/translations.yml (vendored) — 2 changes

```diff
@@ -22,7 +22,7 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.5.0
+        uses: actions/setup-python@v5.4.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
```
.github/workflows/wheels.yml (vendored) — 28 changes

```diff
@@ -36,7 +36,7 @@ jobs:

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.5.0
+        uses: actions/setup-python@v5.4.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
@@ -91,7 +91,7 @@ jobs:
           ) > build_constraints.txt

       - name: Upload env_file
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: env_file
           path: ./.env_file
@@ -99,14 +99,14 @@ jobs:
           overwrite: true

       - name: Upload build_constraints
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: build_constraints
           path: ./build_constraints.txt
           overwrite: true

       - name: Upload requirements_diff
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: requirements_diff
           path: ./requirements_diff.txt
@@ -118,7 +118,7 @@ jobs:
           python -m script.gen_requirements_all ci

       - name: Upload requirements_all_wheels
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: requirements_all_wheels
           path: ./requirements_all_wheels_*.txt
@@ -138,17 +138,17 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Download env_file
-        uses: actions/download-artifact@v4.2.1
+        uses: actions/download-artifact@v4.1.9
         with:
           name: env_file

       - name: Download build_constraints
-        uses: actions/download-artifact@v4.2.1
+        uses: actions/download-artifact@v4.1.9
         with:
           name: build_constraints

       - name: Download requirements_diff
-        uses: actions/download-artifact@v4.2.1
+        uses: actions/download-artifact@v4.1.9
         with:
           name: requirements_diff

@@ -159,7 +159,7 @@ jobs:
           sed -i "/uv/d" requirements_diff.txt

       - name: Build wheels
-        uses: home-assistant/wheels@2025.03.0
+        uses: home-assistant/wheels@2025.02.0
         with:
           abi: ${{ matrix.abi }}
           tag: musllinux_1_2
@@ -187,22 +187,22 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Download env_file
-        uses: actions/download-artifact@v4.2.1
+        uses: actions/download-artifact@v4.1.9
         with:
           name: env_file

       - name: Download build_constraints
-        uses: actions/download-artifact@v4.2.1
+        uses: actions/download-artifact@v4.1.9
         with:
           name: build_constraints

       - name: Download requirements_diff
-        uses: actions/download-artifact@v4.2.1
+        uses: actions/download-artifact@v4.1.9
         with:
           name: requirements_diff

       - name: Download requirements_all_wheels
-        uses: actions/download-artifact@v4.2.1
+        uses: actions/download-artifact@v4.1.9
         with:
           name: requirements_all_wheels

@@ -219,7 +219,7 @@ jobs:
           sed -i "/uv/d" requirements_diff.txt

       - name: Build wheels
-        uses: home-assistant/wheels@2025.03.0
+        uses: home-assistant/wheels@2025.02.0
         with:
           abi: ${{ matrix.abi }}
           tag: musllinux_1_2
```
.gitignore (vendored) — 1 change

```diff
@@ -69,7 +69,6 @@ test-reports/
 test-results.xml
 test-output.xml
 pytest-*.txt
-junit.xml

 # Translations
 *.mo
```
.pre-commit-config.yaml — 2 changes

```diff
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.11.0
+    rev: v0.9.8
     hooks:
       - id: ruff
         args:
```
.strict-typing

```diff
@@ -119,7 +119,6 @@ homeassistant.components.bluetooth_adapters.*
 homeassistant.components.bluetooth_tracker.*
 homeassistant.components.bmw_connected_drive.*
 homeassistant.components.bond.*
-homeassistant.components.bosch_alarm.*
 homeassistant.components.braviatv.*
 homeassistant.components.bring.*
 homeassistant.components.brother.*
@@ -137,7 +136,6 @@ homeassistant.components.clicksend.*
 homeassistant.components.climate.*
 homeassistant.components.cloud.*
 homeassistant.components.co2signal.*
-homeassistant.components.comelit.*
 homeassistant.components.command_line.*
 homeassistant.components.config.*
 homeassistant.components.configurator.*
@@ -291,7 +289,6 @@ homeassistant.components.kaleidescape.*
 homeassistant.components.knocki.*
 homeassistant.components.knx.*
 homeassistant.components.kraken.*
-homeassistant.components.kulersky.*
 homeassistant.components.lacrosse.*
 homeassistant.components.lacrosse_view.*
 homeassistant.components.lamarzocco.*
@@ -365,7 +362,6 @@ homeassistant.components.notify.*
 homeassistant.components.notion.*
 homeassistant.components.number.*
-homeassistant.components.nut.*
 homeassistant.components.ohme.*
 homeassistant.components.onboarding.*
 homeassistant.components.oncue.*
 homeassistant.components.onedrive.*
@@ -415,7 +411,6 @@ homeassistant.components.recollect_waste.*
 homeassistant.components.recorder.*
 homeassistant.components.remember_the_milk.*
 homeassistant.components.remote.*
-homeassistant.components.remote_calendar.*
 homeassistant.components.renault.*
 homeassistant.components.reolink.*
 homeassistant.components.repairs.*
```
.vscode/tasks.json (vendored) — 2 changes

```diff
@@ -4,7 +4,7 @@
     {
       "label": "Run Home Assistant Core",
       "type": "shell",
-      "command": "${command:python.interpreterPath} -m homeassistant -c ./config",
+      "command": "hass -c ./config",
       "group": "test",
       "presentation": {
         "reveal": "always",
```
CODEOWNERS (generated) — 23 changes

```diff
@@ -216,8 +216,6 @@ build.json @home-assistant/supervisor
 /tests/components/bmw_connected_drive/ @gerard33 @rikroe
 /homeassistant/components/bond/ @bdraco @prystupa @joshs85 @marciogranzotto
 /tests/components/bond/ @bdraco @prystupa @joshs85 @marciogranzotto
-/homeassistant/components/bosch_alarm/ @mag1024 @sanjay900
-/tests/components/bosch_alarm/ @mag1024 @sanjay900
 /homeassistant/components/bosch_shc/ @tschamm
 /tests/components/bosch_shc/ @tschamm
 /homeassistant/components/braviatv/ @bieniu @Drafteed
@@ -432,7 +430,7 @@ build.json @home-assistant/supervisor
 /homeassistant/components/entur_public_transport/ @hfurubotten
 /homeassistant/components/environment_canada/ @gwww @michaeldavie
 /tests/components/environment_canada/ @gwww @michaeldavie
-/homeassistant/components/ephember/ @ttroy50 @roberty99
+/homeassistant/components/ephember/ @ttroy50
 /homeassistant/components/epic_games_store/ @hacf-fr @Quentame
 /tests/components/epic_games_store/ @hacf-fr @Quentame
 /homeassistant/components/epion/ @lhgravendeel
@@ -572,8 +570,8 @@ build.json @home-assistant/supervisor
 /tests/components/google_cloud/ @lufton @tronikos
 /homeassistant/components/google_drive/ @tronikos
 /tests/components/google_drive/ @tronikos
-/homeassistant/components/google_generative_ai_conversation/ @tronikos @ivanlh
-/tests/components/google_generative_ai_conversation/ @tronikos @ivanlh
+/homeassistant/components/google_generative_ai_conversation/ @tronikos
+/tests/components/google_generative_ai_conversation/ @tronikos
 /homeassistant/components/google_mail/ @tkdrob
 /tests/components/google_mail/ @tkdrob
 /homeassistant/components/google_photos/ @allenporter
@@ -704,8 +702,6 @@ build.json @home-assistant/supervisor
 /tests/components/image_upload/ @home-assistant/core
 /homeassistant/components/imap/ @jbouwh
 /tests/components/imap/ @jbouwh
-/homeassistant/components/imeon_inverter/ @Imeon-Energy
-/tests/components/imeon_inverter/ @Imeon-Energy
 /homeassistant/components/imgw_pib/ @bieniu
 /tests/components/imgw_pib/ @bieniu
 /homeassistant/components/improv_ble/ @emontnemery
@@ -937,8 +933,6 @@ build.json @home-assistant/supervisor
 /tests/components/metoffice/ @MrHarcombe @avee87
 /homeassistant/components/microbees/ @microBeesTech
 /tests/components/microbees/ @microBeesTech
-/homeassistant/components/miele/ @astrandb
-/tests/components/miele/ @astrandb
 /homeassistant/components/mikrotik/ @engrbm87
 /tests/components/mikrotik/ @engrbm87
 /homeassistant/components/mill/ @danielhiversen
@@ -1189,8 +1183,6 @@ build.json @home-assistant/supervisor
 /tests/components/prusalink/ @balloob
 /homeassistant/components/ps4/ @ktnrg45
 /tests/components/ps4/ @ktnrg45
-/homeassistant/components/pterodactyl/ @elmurato
-/tests/components/pterodactyl/ @elmurato
 /homeassistant/components/pure_energie/ @klaasnicolaas
 /tests/components/pure_energie/ @klaasnicolaas
 /homeassistant/components/purpleair/ @bachya
@@ -1260,8 +1252,6 @@ build.json @home-assistant/supervisor
 /tests/components/refoss/ @ashionky
 /homeassistant/components/remote/ @home-assistant/core
 /tests/components/remote/ @home-assistant/core
-/homeassistant/components/remote_calendar/ @Thomas55555
-/tests/components/remote_calendar/ @Thomas55555
 /homeassistant/components/renault/ @epenet
 /tests/components/renault/ @epenet
 /homeassistant/components/renson/ @jimmyd-be
@@ -1391,6 +1381,7 @@ build.json @home-assistant/supervisor
 /homeassistant/components/siren/ @home-assistant/core @raman325
 /tests/components/siren/ @home-assistant/core @raman325
 /homeassistant/components/sisyphus/ @jkeljo
+/homeassistant/components/sky_hub/ @rogerselwyn
 /homeassistant/components/sky_remote/ @dunnmj @saty9
 /tests/components/sky_remote/ @dunnmj @saty9
 /homeassistant/components/skybell/ @tkdrob
@@ -1483,6 +1474,8 @@ build.json @home-assistant/supervisor
 /tests/components/suez_water/ @ooii @jb101010-2
 /homeassistant/components/sun/ @Swamp-Ig
 /tests/components/sun/ @Swamp-Ig
+/homeassistant/components/sunweg/ @rokam
+/tests/components/sunweg/ @rokam
 /homeassistant/components/supla/ @mwegrzynek
 /homeassistant/components/surepetcare/ @benleb @danielhiversen
 /tests/components/surepetcare/ @benleb @danielhiversen
@@ -1536,8 +1529,8 @@ build.json @home-assistant/supervisor
 /tests/components/tedee/ @patrickhilker @zweckj
 /homeassistant/components/tellduslive/ @fredrike
 /tests/components/tellduslive/ @fredrike
-/homeassistant/components/template/ @Petro31 @PhracturedBlue @home-assistant/core
-/tests/components/template/ @Petro31 @PhracturedBlue @home-assistant/core
+/homeassistant/components/template/ @PhracturedBlue @home-assistant/core
+/tests/components/template/ @PhracturedBlue @home-assistant/core
 /homeassistant/components/tesla_fleet/ @Bre77
 /tests/components/tesla_fleet/ @Bre77
 /homeassistant/components/tesla_wall_connector/ @einarhauks
```
Dockerfile (generated) — 4 changes

```diff
@@ -25,13 +25,13 @@ RUN \
         "armv7") go2rtc_suffix='arm' ;; \
         *) go2rtc_suffix=${BUILD_ARCH} ;; \
     esac \
-    && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.9/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
+    && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.8/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
     && chmod +x /bin/go2rtc \
     # Verify go2rtc can be executed
     && go2rtc --version

 # Install uv
-RUN pip3 install uv==0.6.10
+RUN pip3 install uv==0.6.1

 WORKDIR /usr/src
```
build.yaml — 1 change

```diff
@@ -19,4 +19,4 @@ labels:
   org.opencontainers.image.authors: The Home Assistant Authors
   org.opencontainers.image.url: https://www.home-assistant.io/
   org.opencontainers.image.documentation: https://www.home-assistant.io/docs/
-  org.opencontainers.image.licenses: Apache-2.0
+  org.opencontainers.image.licenses: Apache License 2.0
```
homeassistant/block_async_io.py

```diff
@@ -178,15 +178,6 @@ _BLOCKING_CALLS: tuple[BlockingCall, ...] = (
         strict_core=False,
         skip_for_tests=True,
     ),
-    BlockingCall(
-        original_func=SSLContext.set_default_verify_paths,
-        object=SSLContext,
-        function="set_default_verify_paths",
-        check_allowed=None,
-        strict=False,
-        strict_core=False,
-        skip_for_tests=True,
-    ),
     BlockingCall(
         original_func=Path.open,
         object=Path,
```
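The deleted entry above drops `SSLContext.set_default_verify_paths` from the table of calls that Home Assistant flags when they run inside the event loop. As a rough standalone sketch of the underlying idea only — the real table-driven implementation in `homeassistant/block_async_io.py` is more involved, and the name `protect_blocking_call` is invented here:

```python
# Sketch: wrap a blocking function so it warns when invoked from a running
# event loop, the way the guard table patches methods at startup.
import asyncio
import functools
import logging
import time

_LOGGER = logging.getLogger(__name__)


def protect_blocking_call(func):
    """Wrap func to warn when it is called from a running event loop."""

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            asyncio.get_running_loop()
        except RuntimeError:
            pass  # No loop running: calling blocking code here is fine.
        else:
            _LOGGER.warning(
                "Detected blocking call to %s inside the event loop",
                func.__name__,
            )
        return func(*args, **kwargs)

    return wrapper


# Example: monkeypatch time.sleep the same way the table patches methods.
time.sleep = protect_blocking_call(time.sleep)
```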
homeassistant/bootstrap.py

```diff
@@ -53,7 +53,6 @@ from .components import (
     logbook as logbook_pre_import,  # noqa: F401
     lovelace as lovelace_pre_import,  # noqa: F401
     onboarding as onboarding_pre_import,  # noqa: F401
-    person as person_pre_import,  # noqa: F401
     recorder as recorder_import,  # noqa: F401 - not named pre_import since it has requirements
     repairs as repairs_pre_import,  # noqa: F401
     search as search_pre_import,  # noqa: F401
@@ -82,7 +81,6 @@ from .helpers import (
     entity,
     entity_registry,
     floor_registry,
-    frame,
     issue_registry,
     label_registry,
     recorder,
@@ -94,7 +92,6 @@ from .helpers.dispatcher import async_dispatcher_send_internal
 from .helpers.storage import get_internal_store_manager
 from .helpers.system_info import async_get_system_info
 from .helpers.typing import ConfigType
-from .loader import Integration
 from .setup import (
     # _setup_started is marked as protected to make it clear
     # that it is not part of the public API and should not be used
@@ -301,6 +298,14 @@ async def async_setup_hass(

         return hass

+    async def stop_hass(hass: core.HomeAssistant) -> None:
+        """Stop hass."""
+        # Ask integrations to shut down. It's messy but we can't
+        # do a clean stop without knowing what is broken
+        with contextlib.suppress(TimeoutError):
+            async with hass.timeout.async_timeout(10):
+                await hass.async_stop()
+
     hass = await create_hass()

     if runtime_config.skip_pip or runtime_config.skip_pip_packages:
```
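The `stop_hass` helper added on one side of this hunk bounds shutdown with a timeout so a misbehaving integration cannot hang bootstrap forever. A minimal standalone sketch of the same pattern, assuming plain asyncio (Python 3.11+, where `asyncio.wait_for` raises the built-in `TimeoutError`) rather than Home Assistant's timeout manager:

```python
import asyncio
import contextlib


async def stop_with_timeout(shutdown_coro, timeout: float = 10.0) -> None:
    """Run a shutdown coroutine, giving up silently after `timeout` seconds."""
    with contextlib.suppress(TimeoutError):
        # wait_for raises TimeoutError if shutdown does not finish in time.
        await asyncio.wait_for(shutdown_coro, timeout)


async def main() -> None:
    async def slow_shutdown() -> None:
        await asyncio.sleep(60)  # Simulates an integration that hangs.

    await stop_with_timeout(slow_shutdown(), timeout=0.1)
    print("continued despite the hung shutdown")


asyncio.run(main())
```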
```diff
@@ -339,7 +344,7 @@ async def async_setup_hass(

     if config_dict is None:
         recovery_mode = True
-        await hass.async_stop(force=True)
+        await stop_hass(hass)
         hass = await create_hass()

     elif not basic_setup_success:
@@ -347,7 +352,7 @@ async def async_setup_hass(
             "Unable to set up core integrations. Activating recovery mode"
         )
         recovery_mode = True
-        await hass.async_stop(force=True)
+        await stop_hass(hass)
         hass = await create_hass()

     elif any(
@@ -362,7 +367,7 @@ async def async_setup_hass(
         old_logging = hass.data.get(DATA_LOGGING)

         recovery_mode = True
-        await hass.async_stop(force=True)
+        await stop_hass(hass)
         hass = await create_hass()

         if old_logging:
@@ -436,10 +441,9 @@ async def async_load_base_functionality(hass: core.HomeAssistant) -> None:
     if DATA_REGISTRIES_LOADED in hass.data:
         return
     hass.data[DATA_REGISTRIES_LOADED] = None
-    entity.async_setup(hass)
-    frame.async_setup(hass)
-    template.async_setup(hass)
     translation.async_setup(hass)
+    entity.async_setup(hass)
+    template.async_setup(hass)
     await asyncio.gather(
         create_eager_task(get_internal_store_manager(hass).async_initialize()),
         create_eager_task(area_registry.async_load(hass)),
@@ -660,10 +664,9 @@ def _create_log_file(
         err_handler = _RotatingFileHandlerWithoutShouldRollOver(
             err_log_path, backupCount=1
         )
-        try:
-            err_handler.doRollover()
-        except OSError as err:
-            _LOGGER.error("Error rolling over log file: %s", err)
-
+    try:
+        err_handler.doRollover()
+    except OSError as err:
+        _LOGGER.error("Error rolling over log file: %s", err)

     return err_handler
```
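Both sides of the hunk above roll the error log over at startup and downgrade an `OSError` to a log message instead of aborting. A small self-contained sketch of that pattern with the standard-library handler (the function name here is invented; Home Assistant uses a custom `_RotatingFileHandlerWithoutShouldRollOver` subclass):

```python
import logging
from logging.handlers import RotatingFileHandler

_LOGGER = logging.getLogger(__name__)


def create_error_log_handler(err_log_path: str) -> RotatingFileHandler:
    """Open the error log, keeping one backup of the previous run."""
    handler = RotatingFileHandler(err_log_path, backupCount=1)
    try:
        # Start each run with a fresh file; the old one becomes <path>.1
        handler.doRollover()
    except OSError as err:
        # A failed rollover (e.g. read-only filesystem) should not be fatal.
        _LOGGER.error("Error rolling over log file: %s", err)
    return handler
```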
```diff
@@ -713,25 +718,20 @@ def _get_domains(hass: core.HomeAssistant, config: dict[str, Any]) -> set[str]:
     return domains


-async def _async_resolve_domains_and_preload(
+async def _async_resolve_domains_to_setup(
     hass: core.HomeAssistant, config: dict[str, Any]
-) -> tuple[dict[str, Integration], dict[str, Integration]]:
-    """Resolve all dependencies and return integrations to set up.
-
-    The return value is a tuple of two dictionaries:
-    - The first dictionary contains integrations
-      specified by the configuration (including config entries).
-    - The second dictionary contains the same integrations as the first dictionary
-      together with all their dependencies.
-    """
+) -> tuple[set[str], dict[str, loader.Integration]]:
+    """Resolve all dependencies and return list of domains to set up."""
     domains_to_setup = _get_domains(hass, config)
+    needed_requirements: set[str] = set()
     platform_integrations = conf_util.extract_platform_integrations(
         config, BASE_PLATFORMS
     )
-    # Ensure base platforms that have platform integrations are added to `domains`,
-    # so they can be setup first instead of discovering them later when a config
-    # entry setup task notices that it's needed and there is already a long line
-    # to use the import executor.
+    # Ensure base platforms that have platform integrations are added to
+    # to `domains_to_setup so they can be setup first instead of
+    # discovering them when later when a config entry setup task
+    # notices its needed and there is already a long line to use
+    # the import executor.
     #
     # For example if we have
     # sensor:
```
@@ -747,78 +747,111 @@ async def _async_resolve_domains_and_preload(
|
||||
# so this will be less of a problem in the future.
|
||||
domains_to_setup.update(platform_integrations)
|
||||
|
||||
# Additionally process base platforms since we do not require the manifest
|
||||
# to list them as dependencies.
|
||||
# We want to later avoid lock contention when multiple integrations try to load
|
||||
# their manifests at once.
|
||||
# Also process integrations that are defined under base platforms
|
||||
# to speed things up.
|
||||
additional_domains_to_process = {
|
||||
# Load manifests for base platforms and platform based integrations
|
||||
# that are defined under base platforms right away since we do not require
|
||||
# the manifest to list them as dependencies and we want to avoid the lock
|
||||
# contention when multiple integrations try to load them at once
|
||||
additional_manifests_to_load = {
|
||||
*BASE_PLATFORMS,
|
||||
*chain.from_iterable(platform_integrations.values()),
|
||||
}
|
||||
|
||||
translations_to_load = additional_manifests_to_load.copy()
|
||||
|
||||
# Resolve all dependencies so we know all integrations
|
||||
# that will have to be loaded and start right-away
|
||||
integrations_or_excs = await loader.async_get_integrations(
|
||||
hass, {*domains_to_setup, *additional_domains_to_process}
|
||||
)
|
||||
# Eliminate those missing or with invalid manifest
|
||||
integrations_to_process = {
|
||||
domain: itg
|
||||
for domain, itg in integrations_or_excs.items()
|
||||
if isinstance(itg, Integration)
|
||||
}
|
||||
integrations_dependencies = await loader.resolve_integrations_dependencies(
|
||||
hass, integrations_to_process.values()
|
||||
)
|
||||
# Eliminate those without valid dependencies
|
||||
integrations_to_process = {
|
||||
domain: integrations_to_process[domain] for domain in integrations_dependencies
|
||||
}
|
||||
integration_cache: dict[str, loader.Integration] = {}
|
||||
to_resolve: set[str] = domains_to_setup
|
||||
while to_resolve or additional_manifests_to_load:
|
||||
old_to_resolve: set[str] = to_resolve
|
||||
to_resolve = set()
|
||||
|
||||
integrations_to_setup = {
|
||||
domain: itg
|
||||
for domain, itg in integrations_to_process.items()
|
||||
if domain in domains_to_setup
|
||||
}
|
||||
all_integrations_to_setup = integrations_to_setup.copy()
|
||||
all_integrations_to_setup.update(
|
||||
(dep, loader.async_get_loaded_integration(hass, dep))
|
||||
for domain in integrations_to_setup
|
||||
for dep in integrations_dependencies[domain].difference(
|
||||
all_integrations_to_setup
|
||||
)
|
||||
)
|
||||
if additional_manifests_to_load:
|
||||
to_get = {*old_to_resolve, *additional_manifests_to_load}
|
||||
additional_manifests_to_load.clear()
|
||||
else:
|
||||
to_get = old_to_resolve
|
||||
|
||||
# Gather requirements for all integrations,
|
||||
# their dependencies and after dependencies.
|
||||
# To gather all the requirements we must ignore exceptions here.
|
||||
# The exceptions will be detected and handled later in the bootstrap process.
|
||||
integrations_after_dependencies = (
|
||||
await loader.resolve_integrations_after_dependencies(
|
||||
hass, integrations_to_process.values(), ignore_exceptions=True
|
||||
)
|
||||
)
|
||||
integrations_requirements = {
|
||||
domain: itg.requirements for domain, itg in integrations_to_process.items()
|
||||
}
|
||||
integrations_requirements.update(
|
||||
(dep, loader.async_get_loaded_integration(hass, dep).requirements)
|
||||
for deps in integrations_after_dependencies.values()
|
||||
for dep in deps.difference(integrations_requirements)
|
||||
)
|
||||
all_requirements = set(chain.from_iterable(integrations_requirements.values()))
|
||||
manifest_deps: set[str] = set()
|
||||
resolve_dependencies_tasks: list[asyncio.Task[bool]] = []
|
||||
integrations_to_process: list[loader.Integration] = []
|
||||
|
||||
for domain, itg in (await loader.async_get_integrations(hass, to_get)).items():
|
||||
if not isinstance(itg, loader.Integration):
|
||||
continue
|
||||
integration_cache[domain] = itg
|
||||
needed_requirements.update(itg.requirements)
|
||||
|
||||
# Make sure manifests for dependencies are loaded in the next
|
||||
# loop to try to group as many as manifest loads in a single
|
||||
# call to avoid the creating one-off executor jobs later in
|
||||
# the setup process
|
||||
additional_manifests_to_load.update(
|
||||
dep
|
||||
for dep in chain(itg.dependencies, itg.after_dependencies)
|
||||
if dep not in integration_cache
|
||||
)
|
||||
|
||||
if domain not in old_to_resolve:
|
||||
continue
|
||||
|
||||
integrations_to_process.append(itg)
|
||||
manifest_deps.update(itg.dependencies)
|
||||
manifest_deps.update(itg.after_dependencies)
|
||||
if not itg.all_dependencies_resolved:
|
||||
resolve_dependencies_tasks.append(
|
||||
create_eager_task(
|
||||
itg.resolve_dependencies(),
|
||||
name=f"resolve dependencies {domain}",
|
||||
loop=hass.loop,
|
||||
)
|
||||
)
|
||||
|
||||
if unseen_deps := manifest_deps - integration_cache.keys():
|
||||
# If there are dependencies, try to preload all
|
||||
# the integrations manifest at once and add them
|
||||
# to the list of requirements we need to install
|
||||
# so we can try to check if they are already installed
|
||||
# in a single call below which avoids each integration
|
||||
# having to wait for the lock to do it individually
|
||||
deps = await loader.async_get_integrations(hass, unseen_deps)
|
||||
for dependant_domain, dependant_itg in deps.items():
|
||||
if isinstance(dependant_itg, loader.Integration):
|
||||
integration_cache[dependant_domain] = dependant_itg
|
||||
needed_requirements.update(dependant_itg.requirements)
|
||||
|
||||
if resolve_dependencies_tasks:
await asyncio.gather(*resolve_dependencies_tasks)

for itg in integrations_to_process:
try:
all_deps = itg.all_dependencies
except RuntimeError:
# Integration.all_dependencies raises RuntimeError if
# dependencies could not be resolved
continue
for dep in all_deps:
if dep in domains_to_setup:
continue
domains_to_setup.add(dep)
to_resolve.add(dep)

_LOGGER.info("Domains to be set up: %s", domains_to_setup)

# Optimistically check if requirements are already installed
# ahead of setting up the integrations so we can prime the cache
# We do not wait for this since it's an optimization only
# We do not wait for this since its an optimization only
hass.async_create_background_task(
requirements.async_load_installed_versions(hass, all_requirements),
requirements.async_load_installed_versions(hass, needed_requirements),
"check installed requirements",
eager_start=True,
)

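Both versions of that call rely on the same fire-and-forget shape: the task warms a cache and is never awaited on the startup path, so a failure only costs the optimization. The same idea reduced to the standard library, using importlib.metadata to resolve installed package versions:

import asyncio
from importlib import metadata

_version_cache: dict[str, str | None] = {}

async def prime_version_cache(packages: set[str]) -> None:
    # Fill the cache so later lookups never block on package metadata.
    for name in packages:
        try:
            _version_cache[name] = metadata.version(name)
        except metadata.PackageNotFoundError:
            _version_cache[name] = None

async def bootstrap() -> None:
    task = asyncio.create_task(prime_version_cache({"aiohttp", "yarl"}))
    # ... continue other startup work without awaiting the task ...
    await task  # awaited here only so the demo exits cleanly

asyncio.run(bootstrap())
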
#
# Only add the domains_to_setup after we finish resolving
# as new domains are likely to be added in the process
#
translations_to_load.update(domains_to_setup)
# Start loading translations for all integrations we are going to set up
# in the background so they are ready when we need them. This avoids a
# lot of waiting for the translation load lock and a thundering herd of
@@ -829,7 +862,6 @@ async def _async_resolve_domains_and_preload(
# hold the translation load lock and if anything is fast enough to
# wait for the translation load lock, loading will be done by the
# time it gets to it.
translations_to_load = {*all_integrations_to_setup, *additional_domains_to_process}
hass.async_create_background_task(
translation.async_load_integrations(hass, translations_to_load),
"load translations",
@@ -841,13 +873,13 @@ async def _async_resolve_domains_and_preload(
# in the setup process.
hass.async_create_background_task(
get_internal_store_manager(hass).async_preload(
[*PRELOAD_STORAGE, *all_integrations_to_setup]
[*PRELOAD_STORAGE, *domains_to_setup]
),
"preload storage",
eager_start=True,
)

return integrations_to_setup, all_integrations_to_setup
return domains_to_setup, integration_cache


async def _async_set_up_integrations(
@@ -857,84 +889,69 @@ async def _async_set_up_integrations(
watcher = _WatchPendingSetups(hass, _setup_started(hass))
watcher.async_start()

integrations, all_integrations = await _async_resolve_domains_and_preload(
domains_to_setup, integration_cache = await _async_resolve_domains_to_setup(
hass, config
)
# Detect all cycles
integrations_after_dependencies = (
await loader.resolve_integrations_after_dependencies(
hass, all_integrations.values(), set(all_integrations)
)
)
all_domains = set(integrations_after_dependencies)
domains = set(integrations) & all_domains

_LOGGER.info(
"Domains to be set up: %s | %s",
domains,
all_domains - domains,
)

async_set_domains_to_be_loaded(hass, all_domains)
stage_2_domains = domains_to_setup.copy()

# Initialize recorder
if "recorder" in all_domains:
if "recorder" in domains_to_setup:
recorder.async_initialize_recorder(hass)

# Initialize backup
if "backup" in all_domains:
if "backup" in domains_to_setup:
backup.async_initialize_backup(hass)

stages: list[tuple[str, set[str], int | None]] = [
stage_0_and_1_domains: list[tuple[str, set[str], int | None]] = [
*(
(name, domain_group, timeout)
(name, domain_group & domains_to_setup, timeout)
for name, domain_group, timeout in STAGE_0_INTEGRATIONS
),
("1", STAGE_1_INTEGRATIONS, STAGE_1_TIMEOUT),
("2", domains, STAGE_2_TIMEOUT),
("stage 1", STAGE_1_INTEGRATIONS & domains_to_setup, STAGE_1_TIMEOUT),
]

_LOGGER.info("Setting up stage 0")
for name, domain_group, timeout in stages:
stage_domains_unfiltered = domain_group & all_domains
if not stage_domains_unfiltered:
_LOGGER.info("Nothing to set up in stage %s: %s", name, domain_group)
_LOGGER.info("Setting up stage 0 and 1")
for name, domain_group, timeout in stage_0_and_1_domains:
if not domain_group:
continue

stage_domains = stage_domains_unfiltered - hass.config.components
if not stage_domains:
_LOGGER.info("Already set up stage %s: %s", name, stage_domains_unfiltered)
continue

stage_dep_domains_unfiltered = {
_LOGGER.info("Setting up %s: %s", name, domain_group)
to_be_loaded = domain_group.copy()
to_be_loaded.update(
dep
for domain in stage_domains
for dep in integrations_after_dependencies[domain]
if dep not in stage_domains
}
stage_dep_domains = stage_dep_domains_unfiltered - hass.config.components

stage_all_domains = stage_domains | stage_dep_domains

_LOGGER.info(
"Setting up stage %s: %s | %s\nDependencies: %s | %s",
name,
stage_domains,
stage_domains_unfiltered - stage_domains,
stage_dep_domains,
stage_dep_domains_unfiltered - stage_dep_domains,
for domain in domain_group
if (integration := integration_cache.get(domain)) is not None
for dep in integration.all_dependencies
)
async_set_domains_to_be_loaded(hass, to_be_loaded)
stage_2_domains -= to_be_loaded

if timeout is None:
await _async_setup_multi_components(hass, stage_all_domains, config)
continue
await _async_setup_multi_components(hass, domain_group, config)
else:
try:
async with hass.timeout.async_timeout(timeout, cool_down=COOLDOWN_TIME):
await _async_setup_multi_components(hass, domain_group, config)
except TimeoutError:
_LOGGER.warning(
"Setup timed out for %s waiting on %s - moving forward",
name,
hass._active_tasks, # noqa: SLF001
)

# Add after dependencies when setting up stage 2 domains
async_set_domains_to_be_loaded(hass, stage_2_domains)

if stage_2_domains:
_LOGGER.info("Setting up stage 2: %s", stage_2_domains)
try:
async with hass.timeout.async_timeout(timeout, cool_down=COOLDOWN_TIME):
await _async_setup_multi_components(hass, stage_all_domains, config)
async with hass.timeout.async_timeout(
STAGE_2_TIMEOUT, cool_down=COOLDOWN_TIME
):
await _async_setup_multi_components(hass, stage_2_domains, config)
except TimeoutError:
_LOGGER.warning(
"Setup timed out for stage %s waiting on %s - moving forward",
name,
"Setup timed out for stage 2 waiting on %s - moving forward",
hass._active_tasks, # noqa: SLF001
)

@@ -1036,6 +1053,8 @@ async def _async_setup_multi_components(
config: dict[str, Any],
) -> None:
"""Set up multiple domains. Log on failure."""
# Avoid creating tasks for domains that were set up in a previous stage
domains_not_yet_setup = domains - hass.config.components
# Create setup tasks for base platforms first since everything will have
# to wait to be imported, and the sooner we can get the base platforms
# loaded the sooner we can start loading the rest of the integrations.
@@ -1045,7 +1064,9 @@ async def _async_setup_multi_components(
f"setup component {domain}",
eager_start=True,
)
for domain in sorted(domains, key=SETUP_ORDER_SORT_KEY, reverse=True)
for domain in sorted(
domains_not_yet_setup, key=SETUP_ORDER_SORT_KEY, reverse=True
)
}
results = await asyncio.gather(*futures.values(), return_exceptions=True)
for idx, domain in enumerate(futures):

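Both sides of this refactor keep the same contract: a stage is a named batch of domains run under an optional deadline, and a missed deadline logs a warning and moves on instead of aborting startup. That control flow, reduced to plain asyncio (Python 3.11+) with hypothetical stage contents:

import asyncio
import logging

_LOGGER = logging.getLogger(__name__)

async def set_up(domain: str) -> None:
    await asyncio.sleep(0.01)  # stand-in for real component setup

async def run_stages(stages: list[tuple[str, set[str], float | None]]) -> None:
    for name, domains, timeout in stages:
        if timeout is None:
            await asyncio.gather(*(set_up(d) for d in domains))
            continue
        try:
            async with asyncio.timeout(timeout):
                await asyncio.gather(*(set_up(d) for d in domains))
        except TimeoutError:
            _LOGGER.warning("Setup timed out for %s - moving forward", name)

asyncio.run(run_stages([("stage 1", {"frontend", "api"}, 5.0), ("stage 2", {"light"}, None)]))
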
@@ -1,5 +0,0 @@
{
"domain": "bosch",
"name": "Bosch",
"integrations": ["bosch_alarm", "bosch_shc", "home_connect"]
}
@@ -1,5 +0,0 @@
{
"domain": "eve",
"name": "Eve",
"iot_standards": ["matter"]
}
@@ -1,6 +1,5 @@
{
"domain": "motionblinds",
"name": "Motionblinds",
"integrations": ["motion_blinds", "motionblinds_ble"],
"iot_standards": ["matter"]
"integrations": ["motion_blinds", "motionblinds_ble"]
}

@@ -24,7 +24,7 @@ from homeassistant.components.weather import (

API_METRIC: Final = "Metric"
ATTRIBUTION: Final = "Data provided by AccuWeather"
ATTR_CATEGORY_VALUE = "CategoryValue"
ATTR_CATEGORY: Final = "Category"
ATTR_DIRECTION: Final = "Direction"
ATTR_ENGLISH: Final = "English"
ATTR_LEVEL: Final = "level"
@@ -55,18 +55,5 @@ CONDITION_MAP = {
for cond_ha, cond_codes in CONDITION_CLASSES.items()
for cond_code in cond_codes
}
AIR_QUALITY_CATEGORY_MAP = {
1: "good",
2: "moderate",
3: "unhealthy",
4: "very_unhealthy",
5: "hazardous",
}
POLLEN_CATEGORY_MAP = {
1: "low",
2: "moderate",
3: "high",
4: "very_high",
}
UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=40)
UPDATE_INTERVAL_DAILY_FORECAST = timedelta(hours=6)

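The two maps exist to translate AccuWeather's numeric CategoryValue into stable translation keys, so the sensor state no longer depends on the localized Category string returned by the API. A quick worked example against a hypothetical forecast payload:

AIR_QUALITY_CATEGORY_MAP = {1: "good", 2: "moderate", 3: "unhealthy", 4: "very_unhealthy", 5: "hazardous"}
entry = {"CategoryValue": 4, "Category": "Sehr ungesund"}  # hypothetical localized API payload
assert AIR_QUALITY_CATEGORY_MAP[entry["CategoryValue"]] == "very_unhealthy"
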
@@ -75,11 +75,7 @@ class AccuWeatherObservationDataUpdateCoordinator(
async with timeout(10):
result = await self.accuweather.async_get_current_conditions()
except EXCEPTIONS as error:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="current_conditions_update_error",
translation_placeholders={"error": repr(error)},
) from error
raise UpdateFailed(error) from error

_LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining)

@@ -121,15 +117,9 @@ class AccuWeatherDailyForecastDataUpdateCoordinator(
"""Update data via library."""
try:
async with timeout(10):
result = await self.accuweather.async_get_daily_forecast(
language=self.hass.config.language
)
result = await self.accuweather.async_get_daily_forecast()
except EXCEPTIONS as error:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="forecast_update_error",
translation_placeholders={"error": repr(error)},
) from error
raise UpdateFailed(error) from error

_LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining)


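The newer branch of each raise carries a translation reference instead of a bare string, with the message template living under "exceptions" in strings.json. A minimal sketch of the pattern for a coordinator method, assuming a hypothetical self.client.fetch() and the integration's DOMAIN constant:

from aiohttp import ClientError
from homeassistant.helpers.update_coordinator import UpdateFailed

async def _async_update_data(self):
    """Fetch data, raising a translated UpdateFailed on errors."""
    try:
        return await self.client.fetch()  # hypothetical API client call
    except ClientError as error:
        raise UpdateFailed(
            translation_domain=DOMAIN,  # integration domain constant
            translation_key="update_error",  # key under "exceptions" in strings.json
            translation_placeholders={"error": repr(error)},
        ) from error
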
@@ -7,6 +7,6 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["accuweather"],
"requirements": ["accuweather==4.2.0"],
"requirements": ["accuweather==4.1.0"],
"single_config_entry": true
}

@@ -29,9 +29,8 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import (
AIR_QUALITY_CATEGORY_MAP,
API_METRIC,
ATTR_CATEGORY_VALUE,
ATTR_CATEGORY,
ATTR_DIRECTION,
ATTR_ENGLISH,
ATTR_LEVEL,
@@ -39,7 +38,6 @@ from .const import (
ATTR_VALUE,
ATTRIBUTION,
MAX_FORECAST_DAYS,
POLLEN_CATEGORY_MAP,
)
from .coordinator import (
AccuWeatherConfigEntry,
@@ -61,9 +59,9 @@ class AccuWeatherSensorDescription(SensorEntityDescription):
FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
AccuWeatherSensorDescription(
key="AirQuality",
value_fn=lambda data: AIR_QUALITY_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]],
value_fn=lambda data: cast(str, data[ATTR_CATEGORY]),
device_class=SensorDeviceClass.ENUM,
options=list(AIR_QUALITY_CATEGORY_MAP.values()),
options=["good", "hazardous", "high", "low", "moderate", "unhealthy"],
translation_key="air_quality",
),
AccuWeatherSensorDescription(
@@ -85,9 +83,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
entity_registry_enabled_default=False,
native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
attr_fn=lambda data: {
ATTR_LEVEL: POLLEN_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]]
},
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
translation_key="grass_pollen",
),
AccuWeatherSensorDescription(
@@ -111,9 +107,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
entity_registry_enabled_default=False,
native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
attr_fn=lambda data: {
ATTR_LEVEL: POLLEN_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]]
},
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
translation_key="mold_pollen",
),
AccuWeatherSensorDescription(
@@ -121,9 +115,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
entity_registry_enabled_default=False,
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
attr_fn=lambda data: {
ATTR_LEVEL: POLLEN_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]]
},
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
translation_key="ragweed_pollen",
),
AccuWeatherSensorDescription(
@@ -189,18 +181,14 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
entity_registry_enabled_default=False,
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
attr_fn=lambda data: {
ATTR_LEVEL: POLLEN_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]]
},
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
translation_key="tree_pollen",
),
AccuWeatherSensorDescription(
key="UVIndex",
native_unit_of_measurement=UV_INDEX,
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
attr_fn=lambda data: {
ATTR_LEVEL: POLLEN_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]]
},
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
translation_key="uv_index_forecast",
),
AccuWeatherSensorDescription(

@@ -26,20 +26,10 @@
"state": {
"good": "Good",
"hazardous": "Hazardous",
"high": "High",
"low": "Low",
"moderate": "Moderate",
"unhealthy": "Unhealthy",
"very_unhealthy": "Very unhealthy"
},
"state_attributes": {
"options": {
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]",
"very_unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::very_unhealthy%]"
}
}
"unhealthy": "Unhealthy"
}
},
"apparent_temperature": {
@@ -72,10 +62,12 @@
"level": {
"name": "Level",
"state": {
"high": "[%key:common::state::high%]",
"low": "[%key:common::state::low%]",
"moderate": "Moderate",
"very_high": "[%key:common::state::very_high%]"
"good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
}
}
}
@@ -89,10 +81,12 @@
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
"state": {
"high": "[%key:common::state::high%]",
"low": "[%key:common::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
"very_high": "[%key:common::state::very_high%]"
"good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
}
}
}
@@ -106,15 +100,6 @@
"steady": "Steady",
"rising": "Rising",
"falling": "Falling"
},
"state_attributes": {
"options": {
"state": {
"falling": "[%key:component::accuweather::entity::sensor::pressure_tendency::state::falling%]",
"rising": "[%key:component::accuweather::entity::sensor::pressure_tendency::state::rising%]",
"steady": "[%key:component::accuweather::entity::sensor::pressure_tendency::state::steady%]"
}
}
}
},
"ragweed_pollen": {
@@ -123,10 +108,12 @@
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
"state": {
"high": "[%key:common::state::high%]",
"low": "[%key:common::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
"very_high": "[%key:common::state::very_high%]"
"good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
}
}
}
@@ -167,10 +154,12 @@
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
"state": {
"high": "[%key:common::state::high%]",
"low": "[%key:common::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
"very_high": "[%key:common::state::very_high%]"
"good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
}
}
}
@@ -181,10 +170,12 @@
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
"state": {
"high": "[%key:common::state::high%]",
"low": "[%key:common::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
"very_high": "[%key:common::state::very_high%]"
"good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
}
}
}
@@ -195,10 +186,12 @@
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
"state": {
"high": "[%key:common::state::high%]",
"low": "[%key:common::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
"very_high": "[%key:common::state::very_high%]"
"good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
}
}
}
@@ -229,14 +222,6 @@
}
}
},
"exceptions": {
"current_conditions_update_error": {
"message": "An error occurred while retrieving weather current conditions data from the AccuWeather API: {error}"
},
"forecast_update_error": {
"message": "An error occurred while retrieving weather forecast data from the AccuWeather API: {error}"
}
},
"system_health": {
"info": {
"can_reach_server": "Reach AccuWeather server",

@@ -5,14 +5,14 @@
"data": {
"connection_type": "Select connection type"
},
"description": "Select connection type. Local requires heaters with Bluetooth"
"description": "Select connection type. Local requires heaters with bluetooth"
},
"local": {
"data": {
"wifi_ssid": "Wi-Fi SSID",
"wifi_pswd": "Wi-Fi password"
"wifi_pswd": "Wi-Fi Password"
},
"description": "Reset the heater by pressing + and OK until display shows 'Reset'. Then press and hold OK button on the heater until the blue LED starts blinking before pressing Submit. Configuring heater might take some minutes."
"description": "Reset the heater by pressing + and OK until display shows 'Reset'. Then press and hold OK button on the heater until the blue led starts blinking before pressing Submit. Configuring heater might take some minutes."
},
"cloud": {
"data": {

@@ -2,7 +2,6 @@

from __future__ import annotations

from decimal import Decimal
import logging
from typing import Any

@@ -15,7 +14,6 @@ from homeassistant.components.climate import (
FAN_MEDIUM,
ClimateEntity,
ClimateEntityFeature,
HVACAction,
HVACMode,
)
from homeassistant.const import ATTR_TEMPERATURE, PRECISION_WHOLE, UnitOfTemperature
@@ -51,14 +49,6 @@ ADVANTAGE_AIR_MYTEMP_ENABLED = "climateControlModeEnabled"
ADVANTAGE_AIR_HEAT_TARGET = "myAutoHeatTargetTemp"
ADVANTAGE_AIR_COOL_TARGET = "myAutoCoolTargetTemp"
ADVANTAGE_AIR_MYFAN = "autoAA"
ADVANTAGE_AIR_MYAUTO_MODE_SET = "myAutoModeCurrentSetMode"

HVAC_ACTIONS = {
"cool": HVACAction.COOLING,
"heat": HVACAction.HEATING,
"vent": HVACAction.FAN,
"dry": HVACAction.DRYING,
}

HVAC_MODES = [
HVACMode.OFF,
@@ -185,17 +175,6 @@ class AdvantageAirAC(AdvantageAirAcEntity, ClimateEntity):
return ADVANTAGE_AIR_HVAC_MODES.get(self._ac["mode"])
return HVACMode.OFF

@property
def hvac_action(self) -> HVACAction | None:
"""Return the current running HVAC action."""
if self._ac["state"] == ADVANTAGE_AIR_STATE_OFF:
return HVACAction.OFF
if self._ac["mode"] == "myauto":
return HVAC_ACTIONS.get(
self._ac.get(ADVANTAGE_AIR_MYAUTO_MODE_SET, HVACAction.OFF)
)
return HVAC_ACTIONS.get(self._ac["mode"])

@property
def fan_mode(self) -> str | None:
"""Return the current fan modes."""
@@ -294,22 +273,6 @@ class AdvantageAirZone(AdvantageAirZoneEntity, ClimateEntity):
return HVACMode.HEAT_COOL
return HVACMode.OFF

@property
def hvac_action(self) -> HVACAction | None:
"""Return the HVAC action, inheriting from master AC if zone is open but idle if air is <= 5%."""
if self._ac["state"] == ADVANTAGE_AIR_STATE_OFF:
return HVACAction.OFF
master_action = HVAC_ACTIONS.get(self._ac["mode"], HVACAction.OFF)
if self._ac["mode"] == "myauto":
master_action = HVAC_ACTIONS.get(
str(self._ac.get(ADVANTAGE_AIR_MYAUTO_MODE_SET)), HVACAction.OFF
)
if self._zone["state"] == ADVANTAGE_AIR_STATE_OPEN:
if self._zone["value"] <= Decimal(5):
return HVACAction.IDLE
return master_action
return HVACAction.OFF

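In MyAuto mode the unit reports "myauto" as its mode while the mode actually running sits under myAutoModeCurrentSetMode, which is why both hvac_action properties consult that key before mapping through HVAC_ACTIONS. A worked check against a hypothetical API payload:

from homeassistant.components.climate import HVACAction

HVAC_ACTIONS = {
    "cool": HVACAction.COOLING,
    "heat": HVACAction.HEATING,
    "vent": HVACAction.FAN,
    "dry": HVACAction.DRYING,
}

# Hypothetical Advantage Air payload while MyAuto is active.
ac = {"state": "on", "mode": "myauto", "myAutoModeCurrentSetMode": "heat"}

action = HVAC_ACTIONS.get(str(ac.get("myAutoModeCurrentSetMode")), HVACAction.OFF)
assert action is HVACAction.HEATING
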
@property
def current_temperature(self) -> float | None:
"""Return the current temperature."""

@@ -7,4 +7,3 @@ ADVANTAGE_AIR_STATE_CLOSE = "close"
|
||||
ADVANTAGE_AIR_STATE_ON = "on"
|
||||
ADVANTAGE_AIR_STATE_OFF = "off"
|
||||
ADVANTAGE_AIR_AUTOFAN_ENABLED = "aaAutoFanModeEnabled"
|
||||
ADVANTAGE_AIR_NIGHT_MODE_ENABLED = "quietNightModeEnabled"
|
||||
|
||||
@@ -41,7 +41,7 @@ async def async_setup_entry(
|
||||
entities.append(
|
||||
AdvantageAirThingCover(instance, thing, CoverDeviceClass.BLIND)
|
||||
)
|
||||
elif thing["channelDipState"] in [3, 10]: # 3 & 10 = "Garage door"
|
||||
elif thing["channelDipState"] == 3: # 3 = "Garage door"
|
||||
entities.append(
|
||||
AdvantageAirThingCover(instance, thing, CoverDeviceClass.GARAGE)
|
||||
)
|
||||
|
||||
@@ -9,7 +9,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import AdvantageAirDataConfigEntry
from .const import (
ADVANTAGE_AIR_AUTOFAN_ENABLED,
ADVANTAGE_AIR_NIGHT_MODE_ENABLED,
ADVANTAGE_AIR_STATE_OFF,
ADVANTAGE_AIR_STATE_ON,
)
@@ -33,8 +32,6 @@ async def async_setup_entry(
entities.append(AdvantageAirFreshAir(instance, ac_key))
if ADVANTAGE_AIR_AUTOFAN_ENABLED in ac_device["info"]:
entities.append(AdvantageAirMyFan(instance, ac_key))
if ADVANTAGE_AIR_NIGHT_MODE_ENABLED in ac_device["info"]:
entities.append(AdvantageAirNightMode(instance, ac_key))
if things := instance.coordinator.data.get("myThings"):
entities.extend(
AdvantageAirRelay(instance, thing)
@@ -96,32 +93,6 @@ class AdvantageAirMyFan(AdvantageAirAcEntity, SwitchEntity):
await self.async_update_ac({ADVANTAGE_AIR_AUTOFAN_ENABLED: False})


class AdvantageAirNightMode(AdvantageAirAcEntity, SwitchEntity):
"""Representation of Advantage 'MySleep$aver' Mode control."""

_attr_icon = "mdi:weather-night"
_attr_name = "MySleep$aver"
_attr_device_class = SwitchDeviceClass.SWITCH

def __init__(self, instance: AdvantageAirData, ac_key: str) -> None:
"""Initialize an Advantage Air Night Mode control."""
super().__init__(instance, ac_key)
self._attr_unique_id += "-nightmode"

@property
def is_on(self) -> bool:
"""Return the Night Mode status."""
return self._ac[ADVANTAGE_AIR_NIGHT_MODE_ENABLED]

async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn Night Mode on."""
await self.async_update_ac({ADVANTAGE_AIR_NIGHT_MODE_ENABLED: True})

async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn Night Mode off."""
await self.async_update_ac({ADVANTAGE_AIR_NIGHT_MODE_ENABLED: False})


class AdvantageAirRelay(AdvantageAirThingEntity, SwitchEntity):
"""Representation of Advantage Air Thing."""


@@ -51,7 +51,7 @@
"issues": {
"deprecated_yaml_import_issue_cannot_connect": {
"title": "The {integration_title} YAML configuration import failed",
"description": "Configuring {integration_title} using YAML is being removed but there was a connection error importing your YAML configuration.\n\nEnsure connection to {integration_title} works and restart Home Assistant to try again or remove the {integration_title} YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually."
"description": "Configuring {integration_title} using YAML is being removed but there was an connection error importing your YAML configuration.\n\nEnsure connection to {integration_title} works and restart Home Assistant to try again or remove the {integration_title} YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually."
}
}
}

@@ -11,7 +11,7 @@
}
},
"discovery_confirm": {
"description": "Do you want to set up {model}?"
"description": "Do you want to setup {model}?"
}
},
"abort": {
@@ -68,8 +68,8 @@
"led_bar_mode": {
"name": "LED bar mode",
"state": {
"off": "[%key:common::state::off%]",
"co2": "[%key:component::sensor::entity_component::carbon_dioxide::name%]",
"off": "Off",
"co2": "Carbon dioxide",
"pm": "Particulate matter"
}
},
@@ -143,8 +143,8 @@
"led_bar_mode": {
"name": "[%key:component::airgradient::entity::select::led_bar_mode::name%]",
"state": {
"off": "[%key:common::state::off%]",
"co2": "[%key:component::sensor::entity_component::carbon_dioxide::name%]",
"off": "[%key:component::airgradient::entity::select::led_bar_mode::state::off%]",
"co2": "[%key:component::airgradient::entity::select::led_bar_mode::state::co2%]",
"pm": "[%key:component::airgradient::entity::select::led_bar_mode::state::pm%]"
}
},

@@ -105,14 +105,7 @@ class AirlyDataUpdateCoordinator(DataUpdateCoordinator[dict[str, str | float | i
try:
await measurements.update()
except (AirlyError, ClientConnectorError) as error:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="update_error",
translation_placeholders={
"entry": self.config_entry.title,
"error": repr(error),
},
) from error
raise UpdateFailed(error) from error

_LOGGER.debug(
"Requests remaining: %s/%s",
@@ -133,11 +126,7 @@ class AirlyDataUpdateCoordinator(DataUpdateCoordinator[dict[str, str | float | i
standards = measurements.current["standards"]

if index["description"] == NO_AIRLY_SENSORS:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="no_station",
translation_placeholders={"entry": self.config_entry.title},
)
raise UpdateFailed("Can't retrieve data: no Airly sensors in this area")
for value in values:
data[value["name"]] = value["value"]
for standard in standards:

@@ -36,13 +36,5 @@
"name": "[%key:component::sensor::entity_component::carbon_monoxide::name%]"
}
}
},
"exceptions": {
"update_error": {
"message": "An error occurred while retrieving data from the Airly API for {entry}: {error}"
},
"no_station": {
"message": "An error occurred while retrieving data from the Airly API for {entry}: no measuring stations in this area"
}
}
}

@@ -8,7 +8,7 @@ from aiohttp import ClientSession
from aiohttp.client_exceptions import ClientConnectorError
from pyairnow import WebServiceAPI
from pyairnow.conv import aqi_to_concentration
from pyairnow.errors import AirNowError, InvalidJsonError
from pyairnow.errors import AirNowError

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
@@ -79,7 +79,7 @@ class AirNowDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
distance=self.distance,
)

except (AirNowError, ClientConnectorError, InvalidJsonError) as error:
except (AirNowError, ClientConnectorError) as error:
raise UpdateFailed(error) from error

if not obs:

@@ -7,7 +7,7 @@
"api_key": "[%key:common::config_flow::data::api_key%]",
"latitude": "[%key:common::config_flow::data::latitude%]",
"longitude": "[%key:common::config_flow::data::longitude%]",
"radius": "Station radius (miles; optional)"
"radius": "Station Radius (miles; optional)"
}
}
},
@@ -25,7 +25,7 @@
"step": {
"init": {
"data": {
"radius": "Station radius (miles)"
"radius": "Station Radius (miles)"
}
}
}

@@ -91,7 +91,7 @@
"name": "Hydrogen fluoride"
},
"health_index": {
"name": "Health index"
"name": "Health Index"
},
"absolute_humidity": {
"name": "Absolute humidity"
@@ -112,10 +112,10 @@
"name": "Oxygen"
},
"performance_index": {
"name": "Performance index"
"name": "Performance Index"
},
"hydrogen_phosphide": {
"name": "Hydrogen phosphide"
"name": "Hydrogen Phosphide"
},
"relative_pressure": {
"name": "Relative pressure"
@@ -127,22 +127,22 @@
"name": "Refrigerant"
},
"silicon_hydride": {
"name": "Silicon hydride"
"name": "Silicon Hydride"
},
"noise": {
"name": "Noise"
},
"maximum_noise": {
"name": "Noise (maximum)"
"name": "Noise (Maximum)"
},
"radon": {
"name": "Radon"
},
"industrial_volatile_organic_compounds": {
"name": "VOCs (industrial)"
"name": "VOCs (Industrial)"
},
"virus_index": {
"name": "Virus index"
"name": "Virus Index"
}
}
}

@@ -102,8 +102,7 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
device = await self._get_device_data(discovery_info)
except AirthingsDeviceUpdateError:
return self.async_abort(reason="cannot_connect")
except Exception:
_LOGGER.exception("Unknown error occurred")
except Exception: # noqa: BLE001
return self.async_abort(reason="unknown")

name = get_name(device)
@@ -161,8 +160,7 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
device = await self._get_device_data(discovery_info)
except AirthingsDeviceUpdateError:
return self.async_abort(reason="cannot_connect")
except Exception:
_LOGGER.exception("Unknown error occurred")
except Exception: # noqa: BLE001
return self.async_abort(reason="unknown")
name = get_name(device)
self._discovered_devices[address] = Discovery(name, discovery_info, device)

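The shape repeated across these config flows: catch the expected failure first, then a deliberately broad except Exception that logs before aborting, so an unexpected error leaves a traceback in the log instead of disappearing into an "unknown" abort. The pattern in isolation, using the names from the hunk above:

try:
    device = await self._get_device_data(discovery_info)
except AirthingsDeviceUpdateError:
    return self.async_abort(reason="cannot_connect")
except Exception:
    _LOGGER.exception("Unknown error occurred")  # keep the traceback
    return self.async_abort(reason="unknown")
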
@@ -32,8 +32,7 @@ class AirTouch5ConfigFlow(ConfigFlow, domain=DOMAIN):
client = Airtouch5SimpleClient(user_input[CONF_HOST])
try:
await client.test_connection()
except Exception:
_LOGGER.exception("Unexpected exception")
except Exception: # noqa: BLE001
errors = {"base": "cannot_connect"}
else:
await self.async_set_unique_id(user_input[CONF_HOST])

@@ -2,7 +2,7 @@
"config": {
"step": {
"geography_by_coords": {
"title": "Configure a geography",
"title": "Configure a Geography",
"description": "Use the AirVisual cloud API to monitor a latitude/longitude.",
"data": {
"api_key": "[%key:common::config_flow::data::api_key%]",
@@ -16,8 +16,8 @@
"data": {
"api_key": "[%key:common::config_flow::data::api_key%]",
"city": "City",
"state": "State",
"country": "[%key:common::config_flow::data::country%]"
"country": "Country",
"state": "State"
}
},
"reauth_confirm": {
@@ -56,12 +56,12 @@
"sensor": {
"pollutant_label": {
"state": {
"co": "[%key:component::sensor::entity_component::carbon_monoxide::name%]",
"n2": "[%key:component::sensor::entity_component::nitrogen_dioxide::name%]",
"o3": "[%key:component::sensor::entity_component::ozone::name%]",
"p1": "[%key:component::sensor::entity_component::pm10::name%]",
"p2": "[%key:component::sensor::entity_component::pm25::name%]",
"s2": "[%key:component::sensor::entity_component::sulphur_dioxide::name%]"
"co": "Carbon Monoxide",
"n2": "Nitrogen Dioxide",
"o3": "Ozone",
"p1": "PM10",
"p2": "PM2.5",
"s2": "Sulfur Dioxide"
}
},
"pollutant_level": {

@@ -11,5 +11,5 @@
"documentation": "https://www.home-assistant.io/integrations/airzone",
"iot_class": "local_polling",
"loggers": ["aioairzone"],
"requirements": ["aioairzone==1.0.0"]
"requirements": ["aioairzone==0.9.9"]
}

@@ -9,8 +9,6 @@ from aioairzone.const import (
AZD_HUMIDITY,
AZD_TEMP,
AZD_TEMP_UNIT,
AZD_THERMOSTAT_BATTERY,
AZD_THERMOSTAT_SIGNAL,
AZD_WEBSERVER,
AZD_WIFI_RSSI,
AZD_ZONES,
@@ -75,20 +73,6 @@ ZONE_SENSOR_TYPES: Final[tuple[SensorEntityDescription, ...]] = (
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
device_class=SensorDeviceClass.BATTERY,
key=AZD_THERMOSTAT_BATTERY,
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
key=AZD_THERMOSTAT_SIGNAL,
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
translation_key="thermostat_signal",
),
)


@@ -76,9 +76,6 @@
"sensor": {
"rssi": {
"name": "RSSI"
},
"thermostat_signal": {
"name": "Signal strength"
}
}
}

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/airzone_cloud",
"iot_class": "cloud_push",
"loggers": ["aioairzone_cloud"],
"requirements": ["aioairzone-cloud==0.6.11"]
"requirements": ["aioairzone-cloud==0.6.10"]
}

@@ -32,9 +32,9 @@
"air_quality": {
"name": "Air Quality mode",
"state": {
"off": "[%key:common::state::off%]",
"on": "[%key:common::state::on%]",
"auto": "[%key:common::state::auto%]"
"off": "Off",
"on": "On",
"auto": "Auto"
}
},
"modes": {

@@ -1438,7 +1438,7 @@ class AlexaModeController(AlexaCapability):
# Fan preset_mode
if self.instance == f"{fan.DOMAIN}.{fan.ATTR_PRESET_MODE}":
mode = self.entity.attributes.get(fan.ATTR_PRESET_MODE, None)
if mode in self.entity.attributes.get(fan.ATTR_PRESET_MODES, ()):
if mode in self.entity.attributes.get(fan.ATTR_PRESET_MODES, None):
return f"{fan.ATTR_PRESET_MODE}.{mode}"

# Humidifier mode

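The changed default in that membership test is the whole point of the hunk: when an entity exposes no preset_modes attribute, testing membership against None raises TypeError, while an empty tuple simply fails the test. Illustrated on a bare dict:

attributes: dict = {}  # hypothetical fan entity with no preset modes
mode = "turbo"
assert (mode in attributes.get("preset_modes", ())) is False
# mode in attributes.get("preset_modes", None) would raise:
# TypeError: argument of type 'NoneType' is not iterable
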
@@ -6,5 +6,5 @@
"iot_class": "cloud_push",
"loggers": ["boto3", "botocore", "s3transfer"],
"quality_scale": "legacy",
"requirements": ["boto3==1.37.1"]
"requirements": ["boto3==1.34.131"]
}

@@ -240,7 +240,6 @@ SENSOR_DESCRIPTIONS = (
suggested_display_precision=0,
entity_registry_enabled_default=False,
device_class=SensorDeviceClass.WIND_DIRECTION,
state_class=SensorStateClass.MEASUREMENT_ANGLE,
),
SensorEntityDescription(
key=TYPE_WINDGUSTMPH,

@@ -609,7 +609,6 @@ SENSOR_DESCRIPTIONS = (
translation_key="wind_direction",
native_unit_of_measurement=DEGREE,
device_class=SensorDeviceClass.WIND_DIRECTION,
state_class=SensorStateClass.MEASUREMENT_ANGLE,
),
SensorEntityDescription(
key=TYPE_WINDDIR_AVG10M,

@@ -8,7 +8,7 @@ from python_homeassistant_analytics import (
HomeassistantAnalyticsClient,
HomeassistantAnalyticsConnectionError,
)
from python_homeassistant_analytics.models import Environment, IntegrationType
from python_homeassistant_analytics.models import IntegrationType
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow
@@ -81,7 +81,7 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN):
)
try:
addons = await client.get_addons()
integrations = await client.get_integrations(Environment.NEXT)
integrations = await client.get_integrations()
custom_integrations = await client.get_custom_integrations()
except HomeassistantAnalyticsConnectionError:
LOGGER.exception("Error connecting to Home Assistant analytics")
@@ -165,7 +165,7 @@ class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlow):
)
try:
addons = await client.get_addons()
integrations = await client.get_integrations(Environment.NEXT)
integrations = await client.get_integrations()
custom_integrations = await client.get_custom_integrations()
except HomeassistantAnalyticsConnectionError:
LOGGER.exception("Error connecting to Home Assistant analytics")

@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/android_ip_webcam",
"iot_class": "local_polling",
"requirements": ["pydroid-ipcam==3.0.0"]
"requirements": ["pydroid-ipcam==2.0.0"]
}

@@ -73,7 +73,7 @@ class AndroidTVRemoteBaseEntity(Entity):
self._api.send_key_command(key_code, direction)
except ConnectionClosed as exc:
raise HomeAssistantError(
translation_domain=DOMAIN, translation_key="connection_closed"
"Connection to Android TV device is closed"
) from exc

def _send_launch_app_command(self, app_link: str) -> None:
@@ -85,5 +85,5 @@ class AndroidTVRemoteBaseEntity(Entity):
self._api.send_launch_app_command(app_link)
except ConnectionClosed as exc:
raise HomeAssistantError(
translation_domain=DOMAIN, translation_key="connection_closed"
"Connection to Android TV device is closed"
) from exc

@@ -7,6 +7,6 @@
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["androidtvremote2"],
"requirements": ["androidtvremote2==0.2.1"],
"requirements": ["androidtvremote2==0.2.0"],
"zeroconf": ["_androidtvremote2._tcp.local."]
}

@@ -21,7 +21,7 @@ from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import AndroidTVRemoteConfigEntry
from .const import CONF_APP_ICON, CONF_APP_NAME, DOMAIN
from .const import CONF_APP_ICON, CONF_APP_NAME
from .entity import AndroidTVRemoteBaseEntity

PARALLEL_UPDATES = 0
@@ -233,5 +233,5 @@ class AndroidTVRemoteMediaPlayerEntity(AndroidTVRemoteBaseEntity, MediaPlayerEnt
await asyncio.sleep(delay_secs)
except ConnectionClosed as exc:
raise HomeAssistantError(
translation_domain=DOMAIN, translation_key="connection_closed"
"Connection to Android TV device is closed"
) from exc

@@ -54,10 +54,5 @@
}
}
}
},
"exceptions": {
"connection_closed": {
"message": "Connection to the Android TV device is closed"
}
}
}

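The translated-exception variant pairs the raise with the "exceptions" block shown above: the code names a key, and strings.json supplies the user-facing message. A sketch of the code half, with DOMAIN filled in for illustration:

from homeassistant.exceptions import HomeAssistantError

DOMAIN = "androidtv_remote"  # the integration's domain constant

def raise_connection_closed() -> None:
    # The message itself lives in strings.json under "exceptions" -> "connection_closed".
    raise HomeAssistantError(
        translation_domain=DOMAIN,
        translation_key="connection_closed",
    )
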
@@ -2,8 +2,6 @@

from __future__ import annotations

import logging

from anova_wifi import AnovaApi, InvalidLogin
import voluptuous as vol

@@ -13,10 +11,8 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)


class AnovaConfigFlow(ConfigFlow, domain=DOMAIN):
class AnovaConfligFlow(ConfigFlow, domain=DOMAIN):
"""Sets up a config flow for Anova."""

VERSION = 1
@@ -39,8 +35,7 @@ class AnovaConfigFlow(ConfigFlow, domain=DOMAIN):
await api.authenticate()
except InvalidLogin:
errors["base"] = "invalid_auth"
except Exception:
_LOGGER.exception("Unexpected exception")
except Exception: # noqa: BLE001
errors["base"] = "unknown"
else:
return self.async_create_entry(

@@ -22,7 +22,6 @@ from . import AnthemavConfigEntry
from .const import ANTHEMAV_UPDATE_SIGNAL, DOMAIN, MANUFACTURER

_LOGGER = logging.getLogger(__name__)
VOLUME_STEP = 0.01


async def async_setup_entry(
@@ -61,7 +60,6 @@ class AnthemAVR(MediaPlayerEntity):
| MediaPlayerEntityFeature.TURN_OFF
| MediaPlayerEntityFeature.SELECT_SOURCE
)
_attr_volume_step = VOLUME_STEP

def __init__(
self,

@@ -34,12 +34,10 @@ from .const import (
CONF_PROMPT,
CONF_RECOMMENDED,
CONF_TEMPERATURE,
CONF_THINKING_BUDGET,
DOMAIN,
RECOMMENDED_CHAT_MODEL,
RECOMMENDED_MAX_TOKENS,
RECOMMENDED_TEMPERATURE,
RECOMMENDED_THINKING_BUDGET,
)

_LOGGER = logging.getLogger(__name__)
@@ -130,29 +128,21 @@ class AnthropicOptionsFlow(OptionsFlow):
) -> ConfigFlowResult:
"""Manage the options."""
options: dict[str, Any] | MappingProxyType[str, Any] = self.config_entry.options
errors: dict[str, str] = {}

if user_input is not None:
if user_input[CONF_RECOMMENDED] == self.last_rendered_recommended:
if user_input[CONF_LLM_HASS_API] == "none":
user_input.pop(CONF_LLM_HASS_API)
return self.async_create_entry(title="", data=user_input)

if user_input.get(
CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET
) >= user_input.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS):
errors[CONF_THINKING_BUDGET] = "thinking_budget_too_large"
# Re-render the options again, now with the recommended options shown/hidden
self.last_rendered_recommended = user_input[CONF_RECOMMENDED]

if not errors:
return self.async_create_entry(title="", data=user_input)
else:
# Re-render the options again, now with the recommended options shown/hidden
self.last_rendered_recommended = user_input[CONF_RECOMMENDED]

options = {
CONF_RECOMMENDED: user_input[CONF_RECOMMENDED],
CONF_PROMPT: user_input[CONF_PROMPT],
CONF_LLM_HASS_API: user_input[CONF_LLM_HASS_API],
}
options = {
CONF_RECOMMENDED: user_input[CONF_RECOMMENDED],
CONF_PROMPT: user_input[CONF_PROMPT],
CONF_LLM_HASS_API: user_input[CONF_LLM_HASS_API],
}

suggested_values = options.copy()
if not suggested_values.get(CONF_PROMPT):
@@ -166,7 +156,6 @@ class AnthropicOptionsFlow(OptionsFlow):
return self.async_show_form(
step_id="init",
data_schema=schema,
errors=errors or None,
)


@@ -216,10 +205,6 @@ def anthropic_config_option_schema(
CONF_TEMPERATURE,
default=RECOMMENDED_TEMPERATURE,
): NumberSelector(NumberSelectorConfig(min=0, max=1, step=0.05)),
vol.Optional(
CONF_THINKING_BUDGET,
default=RECOMMENDED_THINKING_BUDGET,
): int,
}
)
return schema

@@ -13,8 +13,3 @@ CONF_MAX_TOKENS = "max_tokens"
RECOMMENDED_MAX_TOKENS = 1024
CONF_TEMPERATURE = "temperature"
RECOMMENDED_TEMPERATURE = 1.0
CONF_THINKING_BUDGET = "thinking_budget"
RECOMMENDED_THINKING_BUDGET = 0
MIN_THINKING_BUDGET = 1024

THINKING_MODELS = ["claude-3-7-sonnet-20250219", "claude-3-7-sonnet-latest"]

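The options flow's only added validation is the inequality between those two numbers: a thinking budget at or above max_tokens re-renders the form with a thinking_budget_too_large error. The check in isolation, with hypothetical form data:

RECOMMENDED_MAX_TOKENS = 1024
RECOMMENDED_THINKING_BUDGET = 0

user_input = {"thinking_budget": 2048, "max_tokens": 1024}  # hypothetical form submission
too_large = user_input.get(
    "thinking_budget", RECOMMENDED_THINKING_BUDGET
) >= user_input.get("max_tokens", RECOMMENDED_MAX_TOKENS)
assert too_large  # would set errors["thinking_budget"] = "thinking_budget_too_large"
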
@@ -1,32 +1,23 @@
"""Conversation support for Anthropic."""

from collections.abc import AsyncGenerator, Callable, Iterable
from collections.abc import AsyncGenerator, Callable
import json
from typing import Any, Literal, cast
from typing import Any, Literal

import anthropic
from anthropic import AsyncStream
from anthropic._types import NOT_GIVEN
from anthropic.types import (
InputJSONDelta,
Message,
MessageParam,
MessageStreamEvent,
RawContentBlockDeltaEvent,
RawContentBlockStartEvent,
RawContentBlockStopEvent,
RawMessageStartEvent,
RawMessageStopEvent,
RedactedThinkingBlock,
RedactedThinkingBlockParam,
SignatureDelta,
TextBlock,
TextBlockParam,
TextDelta,
ThinkingBlock,
ThinkingBlockParam,
ThinkingConfigDisabledParam,
ThinkingConfigEnabledParam,
ThinkingDelta,
ToolParam,
ToolResultBlockParam,
ToolUseBlock,
@@ -39,7 +30,7 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr, intent, llm
from homeassistant.helpers import chat_session, device_registry as dr, intent, llm
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import AnthropicConfigEntry
@@ -48,15 +39,11 @@ from .const import (
CONF_MAX_TOKENS,
CONF_PROMPT,
CONF_TEMPERATURE,
CONF_THINKING_BUDGET,
DOMAIN,
LOGGER,
MIN_THINKING_BUDGET,
RECOMMENDED_CHAT_MODEL,
RECOMMENDED_MAX_TOKENS,
RECOMMENDED_TEMPERATURE,
RECOMMENDED_THINKING_BUDGET,
THINKING_MODELS,
)

# Max number of back and forth with the LLM to generate a response
@@ -84,101 +71,73 @@ def _format_tool(
)


def _convert_content(
chat_content: Iterable[conversation.Content],
) -> list[MessageParam]:
"""Transform HA chat_log content into Anthropic API format."""
messages: list[MessageParam] = []
def _message_convert(
message: Message,
) -> MessageParam:
"""Convert from class to TypedDict."""
param_content: list[TextBlockParam | ToolUseBlockParam] = []

for content in chat_content:
if isinstance(content, conversation.ToolResultContent):
tool_result_block = ToolResultBlockParam(
type="tool_result",
tool_use_id=content.tool_call_id,
content=json.dumps(content.tool_result),
for message_content in message.content:
if isinstance(message_content, TextBlock):
param_content.append(TextBlockParam(type="text", text=message_content.text))
elif isinstance(message_content, ToolUseBlock):
param_content.append(
ToolUseBlockParam(
type="tool_use",
id=message_content.id,
name=message_content.name,
input=message_content.input,
)
)
if not messages or messages[-1]["role"] != "user":
messages.append(
MessageParam(
role="user",
content=[tool_result_block],
)
)
elif isinstance(messages[-1]["content"], str):
messages[-1]["content"] = [
TextBlockParam(type="text", text=messages[-1]["content"]),
tool_result_block,
]
else:
messages[-1]["content"].append(tool_result_block) # type: ignore[attr-defined]
elif isinstance(content, conversation.UserContent):
# Combine consequent user messages
if not messages or messages[-1]["role"] != "user":
messages.append(
MessageParam(
role="user",
content=content.content,
)
)
elif isinstance(messages[-1]["content"], str):
messages[-1]["content"] = [
TextBlockParam(type="text", text=messages[-1]["content"]),
TextBlockParam(type="text", text=content.content),
]
else:
messages[-1]["content"].append( # type: ignore[attr-defined]
TextBlockParam(type="text", text=content.content)
)
elif isinstance(content, conversation.AssistantContent):
# Combine consequent assistant messages
if not messages or messages[-1]["role"] != "assistant":
messages.append(
MessageParam(
role="assistant",
content=[],
)
)

if content.content:
messages[-1]["content"].append( # type: ignore[union-attr]
TextBlockParam(type="text", text=content.content)
)
if content.tool_calls:
messages[-1]["content"].extend( # type: ignore[union-attr]
[
ToolUseBlockParam(
type="tool_use",
id=tool_call.id,
name=tool_call.tool_name,
input=tool_call.tool_args,
)
for tool_call in content.tool_calls
]
)
else:
# Note: We don't pass SystemContent here as it's passed to the API as the prompt
raise TypeError(f"Unexpected content type: {type(content)}")
return MessageParam(role=message.role, content=param_content)

return messages

def _convert_content(chat_content: conversation.Content) -> MessageParam:
"""Create tool response content."""
if isinstance(chat_content, conversation.ToolResultContent):
return MessageParam(
role="user",
content=[
ToolResultBlockParam(
type="tool_result",
tool_use_id=chat_content.tool_call_id,
content=json.dumps(chat_content.tool_result),
)
],
)
if isinstance(chat_content, conversation.AssistantContent):
return MessageParam(
role="assistant",
content=[
TextBlockParam(type="text", text=chat_content.content or ""),
*[
ToolUseBlockParam(
type="tool_use",
id=tool_call.id,
name=tool_call.tool_name,
input=tool_call.tool_args,
)
for tool_call in chat_content.tool_calls or ()
],
],
)
if isinstance(chat_content, conversation.UserContent):
return MessageParam(
role="user",
content=chat_content.content,
)
# Note: We don't pass SystemContent here as it's passed to the API as the prompt
raise ValueError(f"Unexpected content type: {type(chat_content)}")


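The list-returning variant folds consecutive same-role entries together because the Anthropic Messages API expects user and assistant turns to alternate, and a tool result is delivered as a user-role block. The merge rule in miniature, with content reduced to (role, text) pairs:

def merge_turns(items: list[tuple[str, str]]) -> list[dict]:
    messages: list[dict] = []
    for role, text in items:
        if messages and messages[-1]["role"] == role:
            messages[-1]["content"].append(text)  # fold into the previous turn
        else:
            messages.append({"role": role, "content": [text]})
    return messages

assert merge_turns([("user", "hi"), ("user", "tool result")]) == [
    {"role": "user", "content": ["hi", "tool result"]}
]
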
async def _transform_stream(
result: AsyncStream[MessageStreamEvent],
messages: list[MessageParam],
) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
"""Transform the response stream into HA format.

A typical stream of responses might look something like the following:
- RawMessageStartEvent with no content
- RawContentBlockStartEvent with an empty ThinkingBlock (if extended thinking is enabled)
- RawContentBlockDeltaEvent with a ThinkingDelta
- RawContentBlockDeltaEvent with a ThinkingDelta
- RawContentBlockDeltaEvent with a ThinkingDelta
- ...
- RawContentBlockDeltaEvent with a SignatureDelta
- RawContentBlockStopEvent
- RawContentBlockStartEvent with a RedactedThinkingBlock (occasionally)
- RawContentBlockStopEvent (RedactedThinkingBlock does not have a delta)
- RawContentBlockStartEvent with an empty TextBlock
- RawContentBlockDeltaEvent with a TextDelta
- RawContentBlockDeltaEvent with a TextDelta
@@ -192,103 +151,44 @@ async def _transform_stream(
- RawContentBlockStopEvent
- RawMessageDeltaEvent with a stop_reason='tool_use'
- RawMessageStopEvent(type='message_stop')

Each message could contain multiple blocks of the same type.
"""
if result is None:
|
||||
raise TypeError("Expected a stream of messages")
|
||||
|
||||
current_message: MessageParam | None = None
|
||||
current_block: (
|
||||
TextBlockParam
|
||||
| ToolUseBlockParam
|
||||
| ThinkingBlockParam
|
||||
| RedactedThinkingBlockParam
|
||||
| None
|
||||
) = None
|
||||
current_tool_args: str
|
||||
current_tool_call: dict | None = None
|
||||
|
||||
async for response in result:
|
||||
LOGGER.debug("Received response: %s", response)
|
||||
|
||||
if isinstance(response, RawMessageStartEvent):
|
||||
if response.message.role != "assistant":
|
||||
raise ValueError("Unexpected message role")
|
||||
current_message = MessageParam(role=response.message.role, content=[])
|
||||
elif isinstance(response, RawContentBlockStartEvent):
|
||||
if isinstance(response, RawContentBlockStartEvent):
|
||||
if isinstance(response.content_block, ToolUseBlock):
|
||||
current_block = ToolUseBlockParam(
|
||||
type="tool_use",
|
||||
id=response.content_block.id,
|
||||
name=response.content_block.name,
|
||||
input="",
|
||||
)
|
||||
current_tool_args = ""
|
||||
current_tool_call = {
|
||||
"id": response.content_block.id,
|
||||
"name": response.content_block.name,
|
||||
"input": "",
|
||||
}
|
||||
elif isinstance(response.content_block, TextBlock):
|
||||
current_block = TextBlockParam(
|
||||
type="text", text=response.content_block.text
|
||||
)
|
||||
yield {"role": "assistant"}
|
||||
if response.content_block.text:
|
||||
yield {"content": response.content_block.text}
|
||||
elif isinstance(response.content_block, ThinkingBlock):
|
||||
current_block = ThinkingBlockParam(
|
||||
type="thinking",
|
||||
thinking=response.content_block.thinking,
|
||||
signature=response.content_block.signature,
|
||||
)
|
||||
elif isinstance(response.content_block, RedactedThinkingBlock):
|
||||
current_block = RedactedThinkingBlockParam(
|
||||
type="redacted_thinking", data=response.content_block.data
|
||||
)
|
||||
LOGGER.debug(
|
||||
"Some of Claude’s internal reasoning has been automatically "
|
||||
"encrypted for safety reasons. This doesn’t affect the quality of "
|
||||
"responses"
|
||||
)
|
||||
elif isinstance(response, RawContentBlockDeltaEvent):
|
||||
if current_block is None:
|
||||
raise ValueError("Unexpected delta without a block")
|
||||
if isinstance(response.delta, InputJSONDelta):
|
||||
current_tool_args += response.delta.partial_json
|
||||
if current_tool_call is None:
|
||||
raise ValueError("Unexpected delta without a tool call")
|
||||
current_tool_call["input"] += response.delta.partial_json
|
||||
elif isinstance(response.delta, TextDelta):
|
||||
text_block = cast(TextBlockParam, current_block)
|
||||
text_block["text"] += response.delta.text
|
||||
LOGGER.debug("yielding delta: %s", response.delta.text)
|
||||
yield {"content": response.delta.text}
|
||||
elif isinstance(response.delta, ThinkingDelta):
|
||||
thinking_block = cast(ThinkingBlockParam, current_block)
|
||||
thinking_block["thinking"] += response.delta.thinking
|
||||
elif isinstance(response.delta, SignatureDelta):
|
||||
thinking_block = cast(ThinkingBlockParam, current_block)
|
||||
thinking_block["signature"] += response.delta.signature
|
||||
elif isinstance(response, RawContentBlockStopEvent):
|
||||
if current_block is None:
|
||||
raise ValueError("Unexpected stop event without a current block")
|
||||
if current_block["type"] == "tool_use":
|
||||
tool_block = cast(ToolUseBlockParam, current_block)
|
||||
tool_args = json.loads(current_tool_args) if current_tool_args else {}
|
||||
tool_block["input"] = tool_args
|
||||
if current_tool_call:
|
||||
yield {
|
||||
"tool_calls": [
|
||||
llm.ToolInput(
|
||||
id=tool_block["id"],
|
||||
tool_name=tool_block["name"],
|
||||
tool_args=tool_args,
|
||||
id=current_tool_call["id"],
|
||||
tool_name=current_tool_call["name"],
|
||||
tool_args=json.loads(current_tool_call["input"]),
|
||||
)
|
||||
]
|
||||
}
|
||||
elif current_block["type"] == "thinking":
|
||||
thinking_block = cast(ThinkingBlockParam, current_block)
|
||||
LOGGER.debug("Thinking: %s", thinking_block["thinking"])
|
||||
|
||||
if current_message is None:
|
||||
raise ValueError("Unexpected stop event without a current message")
|
||||
current_message["content"].append(current_block) # type: ignore[union-attr]
|
||||
current_block = None
|
||||
elif isinstance(response, RawMessageStopEvent):
|
||||
if current_message is not None:
|
||||
messages.append(current_message)
|
||||
current_message = None
|
||||
current_tool_call = None
|
||||
|
||||
|
||||
class AnthropicConversationEntity(
|
||||
@@ -326,6 +226,18 @@ class AnthropicConversationEntity(
|
||||
self.entry.add_update_listener(self._async_entry_update_listener)
|
||||
)
|
||||
|
||||
async def async_process(
|
||||
self, user_input: conversation.ConversationInput
|
||||
) -> conversation.ConversationResult:
|
||||
"""Process a sentence."""
|
||||
with (
|
||||
chat_session.async_get_chat_session(
|
||||
self.hass, user_input.conversation_id
|
||||
) as session,
|
||||
conversation.async_get_chat_log(self.hass, session, user_input) as chat_log,
|
||||
):
|
||||
return await self._async_handle_message(user_input, chat_log)
|
||||
|
||||
async def _async_handle_message(
|
||||
self,
|
||||
user_input: conversation.ConversationInput,
|
||||
@@ -354,50 +266,34 @@ class AnthropicConversationEntity(
|
||||
system = chat_log.content[0]
|
||||
if not isinstance(system, conversation.SystemContent):
|
||||
raise TypeError("First message must be a system message")
|
||||
messages = _convert_content(chat_log.content[1:])
|
||||
messages = [_convert_content(content) for content in chat_log.content[1:]]
|
||||
|
||||
client = self.entry.runtime_data
|
||||
|
||||
thinking_budget = options.get(CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET)
|
||||
model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
|
||||
|
||||
# To prevent infinite loops, we limit the number of iterations
|
||||
for _iteration in range(MAX_TOOL_ITERATIONS):
|
||||
model_args = {
|
||||
"model": model,
|
||||
"messages": messages,
|
||||
"tools": tools or NOT_GIVEN,
|
||||
"max_tokens": options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
|
||||
"system": system.content,
|
||||
"stream": True,
|
||||
}
|
||||
if model in THINKING_MODELS and thinking_budget >= MIN_THINKING_BUDGET:
|
||||
model_args["thinking"] = ThinkingConfigEnabledParam(
|
||||
type="enabled", budget_tokens=thinking_budget
|
||||
)
|
||||
else:
|
||||
model_args["thinking"] = ThinkingConfigDisabledParam(type="disabled")
|
||||
model_args["temperature"] = options.get(
|
||||
CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
|
||||
)
|
||||
|
||||
try:
|
||||
stream = await client.messages.create(**model_args)
|
||||
stream = await client.messages.create(
|
||||
model=options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL),
|
||||
messages=messages,
|
||||
tools=tools or NOT_GIVEN,
|
||||
max_tokens=options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
|
||||
system=system.content,
|
||||
temperature=options.get(CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE),
|
||||
stream=True,
|
||||
)
|
||||
except anthropic.AnthropicError as err:
|
||||
raise HomeAssistantError(
|
||||
f"Sorry, I had a problem talking to Anthropic: {err}"
|
||||
) from err
|
||||
|
||||
messages.extend(
|
||||
_convert_content(
|
||||
[
|
||||
content
|
||||
async for content in chat_log.async_add_delta_content_stream(
|
||||
user_input.agent_id, _transform_stream(stream, messages)
|
||||
)
|
||||
if not isinstance(content, conversation.AssistantContent)
|
||||
]
|
||||
)
|
||||
[
|
||||
_convert_content(content)
|
||||
async for content in chat_log.async_add_delta_content_stream(
|
||||
user_input.agent_id, _transform_stream(stream)
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
if not chat_log.unresponded_tool_results:
|
||||
@@ -409,9 +305,7 @@ class AnthropicConversationEntity(
|
||||
intent_response = intent.IntentResponse(language=user_input.language)
|
||||
intent_response.async_set_speech(response_content.content or "")
|
||||
return conversation.ConversationResult(
|
||||
response=intent_response,
|
||||
conversation_id=chat_log.conversation_id,
|
||||
continue_conversation=chat_log.continue_conversation,
|
||||
response=intent_response, conversation_id=chat_log.conversation_id
|
||||
)
|
||||
|
||||
async def _async_entry_update_listener(
|
||||
|
||||
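Editor's note on the stream handling above: the Anthropic streaming contract delivers tool arguments as InputJSONDelta fragments of one JSON string, and only the concatenation of all fragments parses. A minimal standalone sketch of that accumulate-then-parse pattern follows; the `events` list is a hypothetical stand-in for the SDK stream, not the real event objects.

import json

def collect_tool_args(events):
    """Accumulate partial_json fragments and parse them once the block stops."""
    buffer = ""
    for event in events:
        if event["type"] == "input_json_delta":
            # Each fragment is not valid JSON on its own; just concatenate.
            buffer += event["partial_json"]
        elif event["type"] == "content_block_stop":
            # Only now is the buffer a complete JSON document.
            return json.loads(buffer) if buffer else {}
    return {}

# Example: three fragments that only parse once joined.
events = [
    {"type": "input_json_delta", "partial_json": '{"brightness"'},
    {"type": "input_json_delta", "partial_json": ": 12"},
    {"type": "input_json_delta", "partial_json": "8}"},
    {"type": "content_block_stop"},
]
assert collect_tool_args(events) == {"brightness": 128}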
@@ -23,17 +23,12 @@
        "max_tokens": "Maximum tokens to return in response",
        "temperature": "Temperature",
        "llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]",
        "recommended": "Recommended model settings",
        "thinking_budget_tokens": "Thinking budget"
        "recommended": "Recommended model settings"
      },
      "data_description": {
        "prompt": "Instruct how the LLM should respond. This can be a template.",
        "thinking_budget_tokens": "The number of tokens the model can use to think about the response out of the total maximum number of tokens. Set to 1024 or greater to enable extended thinking."
        "prompt": "Instruct how the LLM should respond. This can be a template."
      }
    }
  },
  "error": {
    "thinking_budget_too_large": "Maximum tokens must be greater than the thinking budget."
  }
}
}
@@ -53,8 +53,10 @@ class OnlineStatus(CoordinatorEntity[APCUPSdCoordinator], BinarySensorEntity):
        """Initialize the APCUPSd binary device."""
        super().__init__(coordinator, context=description.key.upper())

        # Set up unique id and device info if serial number is available.
        if (serial_no := coordinator.data.serial_no) is not None:
            self._attr_unique_id = f"{serial_no}_{description.key}"
        self.entity_description = description
        self._attr_unique_id = f"{coordinator.unique_device_id}_{description.key}"
        self._attr_device_info = coordinator.device_info

    @property
@@ -85,16 +85,11 @@ class APCUPSdCoordinator(DataUpdateCoordinator[APCUPSdData]):
        self._host = host
        self._port = port

    @property
    def unique_device_id(self) -> str:
        """Return a unique ID of the device, which is the serial number (if available) or the config entry ID."""
        return self.data.serial_no or self.config_entry.entry_id

    @property
    def device_info(self) -> DeviceInfo:
        """Return the DeviceInfo of this APC UPS, if serial number is available."""
        return DeviceInfo(
            identifiers={(DOMAIN, self.unique_device_id)},
            identifiers={(DOMAIN, self.data.serial_no or self.config_entry.entry_id)},
            model=self.data.model,
            manufacturer="APC",
            name=self.data.name or "APC UPS",
@@ -458,8 +458,11 @@ class APCUPSdSensor(CoordinatorEntity[APCUPSdCoordinator], SensorEntity):
        """Initialize the sensor."""
        super().__init__(coordinator=coordinator, context=description.key.upper())

        # Set up unique id and device info if serial number is available.
        if (serial_no := coordinator.data.serial_no) is not None:
            self._attr_unique_id = f"{serial_no}_{description.key}"

        self.entity_description = description
        self._attr_unique_id = f"{coordinator.unique_device_id}_{description.key}"
        self._attr_device_info = coordinator.device_info

        # Initial update of attributes.
@@ -57,7 +57,7 @@
      "name": "Status date"
    },
    "dip_switch_settings": {
      "name": "DIP switch settings"
      "name": "Dip switch settings"
    },
    "low_battery_signal": {
      "name": "Low battery signal"
@@ -1 +0,0 @@
"""Virtual integration: Apollo Automation."""
@@ -1,6 +0,0 @@
{
  "domain": "apollo_automation",
  "name": "Apollo Automation",
  "integration_type": "virtual",
  "supported_by": "esphome"
}
@@ -20,7 +20,6 @@ import voluptuous as vol
from homeassistant.components import zeroconf
from homeassistant.config_entries import (
    SOURCE_IGNORE,
    SOURCE_REAUTH,
    SOURCE_ZEROCONF,
    ConfigEntry,
    ConfigFlow,
@@ -382,9 +381,7 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
                CONF_IDENTIFIERS: list(combined_identifiers),
            },
        )
        # Don't reload ignored entries or in the middle of reauth,
        # e.g. if the user is entering a new PIN
        if entry.source != SOURCE_IGNORE and self.source != SOURCE_REAUTH:
        if entry.source != SOURCE_IGNORE:
            self.hass.config_entries.async_schedule_reload(entry.entry_id)
        if not allow_exist:
            raise DeviceAlreadyConfigured

@@ -120,7 +120,6 @@ class AppleTvMediaPlayer(
        """Initialize the Apple TV media player."""
        super().__init__(name, identifier, manager)
        self._playing: Playing | None = None
        self._playing_last_updated: datetime | None = None
        self._app_list: dict[str, str] = {}

    @callback
@@ -210,7 +209,6 @@ class AppleTvMediaPlayer(
        This is a callback function from pyatv.interface.PushListener.
        """
        self._playing = playstatus
        self._playing_last_updated = dt_util.utcnow()
        self.async_write_ha_state()

    @callback
@@ -318,7 +316,7 @@ class AppleTvMediaPlayer(
    def media_position_updated_at(self) -> datetime | None:
        """Last valid time of media position."""
        if self.state in {MediaPlayerState.PLAYING, MediaPlayerState.PAUSED}:
            return self._playing_last_updated
            return dt_util.utcnow()
        return None

    async def async_play_media(

@@ -7,5 +7,5 @@
  "integration_type": "device",
  "iot_class": "local_push",
  "loggers": ["pyaprilaire"],
  "requirements": ["pyaprilaire==0.8.1"]
  "requirements": ["pyaprilaire==0.7.7"]
}
@@ -43,7 +43,6 @@ class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]):

    config_entry: ApSystemsConfigEntry
    device_version: str
    battery_system: bool

    def __init__(
        self,
@@ -69,7 +68,6 @@ class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]):
        self.api.max_power = device_info.maxPower
        self.api.min_power = device_info.minPower
        self.device_version = device_info.devVer
        self.battery_system = device_info.isBatterySystem

    async def _async_update_data(self) -> ApSystemsSensorData:
        try:

@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/apsystems",
  "integration_type": "device",
  "iot_class": "local_polling",
  "requirements": ["apsystems-ez1==2.5.0"]
  "requirements": ["apsystems-ez1==2.4.0"]
}

@@ -36,8 +36,6 @@ class ApSystemsInverterSwitch(ApSystemsEntity, SwitchEntity):
        super().__init__(data)
        self._api = data.coordinator.api
        self._attr_unique_id = f"{data.device_id}_inverter_status"
        if data.coordinator.battery_system:
            self._attr_available = False

    async def async_update(self) -> None:
        """Update switch status and availability."""

@@ -60,7 +60,7 @@ class AquaCellConfigFlow(ConfigFlow, domain=DOMAIN):
                errors["base"] = "cannot_connect"
            except AuthenticationFailed:
                errors["base"] = "invalid_auth"
            except Exception:
            except Exception:  # pylint: disable=broad-except
                _LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"
            else:

@@ -36,9 +36,9 @@
    "wi_fi_strength": {
      "name": "Wi-Fi strength",
      "state": {
        "low": "[%key:common::state::low%]",
        "medium": "[%key:common::state::medium%]",
        "high": "[%key:common::state::high%]"
        "low": "Low",
        "medium": "Medium",
        "high": "High"
      }
    }
  }
@@ -6,11 +6,7 @@ import logging
from typing import Any

from homeassistant.components import mqtt
from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntity,
    SensorStateClass,
)
from homeassistant.components.sensor import SensorDeviceClass, SensorEntity
from homeassistant.const import DEGREE, UnitOfPrecipitationDepth, UnitOfTemperature
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback
@@ -102,7 +98,6 @@ def discover_sensors(topic: str, payload: dict[str, Any]) -> list[ArwnSensor] |
                DEGREE,
                "mdi:compass",
                device_class=SensorDeviceClass.WIND_DIRECTION,
                state_class=SensorStateClass.MEASUREMENT_ANGLE,
            ),
        ]
    return None
@@ -183,7 +178,6 @@ class ArwnSensor(SensorEntity):
        units: str,
        icon: str | None = None,
        device_class: SensorDeviceClass | None = None,
        state_class: SensorStateClass | None = None,
    ) -> None:
        """Initialize the sensor."""
        self.entity_id = _slug(name)
@@ -194,7 +188,6 @@ class ArwnSensor(SensorEntity):
        self._attr_native_unit_of_measurement = units
        self._attr_icon = icon
        self._attr_device_class = device_class
        self._attr_state_class = state_class

    def set_event(self, event: dict[str, Any]) -> None:
        """Update the sensor with the most recent event."""
@@ -125,7 +125,7 @@ SAVE_DELAY = 10
@callback
def _async_local_fallback_intent_filter(result: RecognizeResult) -> bool:
    """Filter out intents that are not local fallback."""
    return result.intent.name in (intent.INTENT_GET_STATE)
    return result.intent.name in (intent.INTENT_GET_STATE, intent.INTENT_NEVERMIND)

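Worth flagging for reviewers: in the removed line, `(intent.INTENT_GET_STATE)` is not a one-element tuple. Parentheses alone only group, so the `in` test degenerates into a substring check against the intent-name string. Adding a second element (or just a trailing comma) is what turns it into real tuple membership. A tiny illustration of the pitfall:

name = "HassGetState"

# Parentheses without a comma are just grouping: this is a substring test.
print(name in ("HassGetState"))    # True, but via str.__contains__
print("Get" in ("HassGetState"))   # True! Almost certainly not intended.

# A trailing comma makes a tuple, giving membership semantics.
print("Get" in ("HassGetState",))  # False, as expected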
@callback
@@ -649,7 +649,6 @@ class PipelineRun:
        data["runner_data"] = self.runner_data
        if self.tts_stream:
            data["tts_output"] = {
                "token": self.tts_stream.token,
                "url": self.tts_stream.url,
                "mime_type": self.tts_stream.content_type,
            }
@@ -1296,7 +1295,6 @@ class PipelineRun:

        tts_output = {
            "media_id": tts_media_id,
            "token": self.tts_stream.token,
            "url": self.tts_stream.url,
            "mime_type": self.tts_stream.content_type,
        }
@@ -1,11 +1,9 @@
"""Base class for assist satellite entities."""

import logging
from pathlib import Path

import voluptuous as vol

from homeassistant.components.http import StaticPathConfig
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
@@ -17,8 +15,6 @@ from .const import (
    CONNECTION_TEST_DATA,
    DATA_COMPONENT,
    DOMAIN,
    PREANNOUNCE_FILENAME,
    PREANNOUNCE_URL,
    AssistSatelliteEntityFeature,
)
from .entity import (
@@ -60,8 +56,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
            {
                vol.Optional("message"): str,
                vol.Optional("media_id"): str,
                vol.Optional("preannounce"): bool,
                vol.Optional("preannounce_media_id"): str,
            }
        ),
        cv.has_at_least_one_key("message", "media_id"),
@@ -76,8 +70,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
            {
                vol.Optional("start_message"): str,
                vol.Optional("start_media_id"): str,
                vol.Optional("preannounce"): bool,
                vol.Optional("preannounce_media_id"): str,
                vol.Optional("extra_system_prompt"): str,
            }
        ),
@@ -90,15 +82,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    async_register_websocket_api(hass)
    hass.http.register_view(ConnectionTestView())

    # Default preannounce sound
    await hass.http.async_register_static_paths(
        [
            StaticPathConfig(
                PREANNOUNCE_URL, str(Path(__file__).parent / PREANNOUNCE_FILENAME)
            )
        ]
    )

    return True


@@ -20,9 +20,6 @@ CONNECTION_TEST_DATA: HassKey[dict[str, asyncio.Event]] = HassKey(
    f"{DOMAIN}_connection_tests"
)

PREANNOUNCE_FILENAME = "preannounce.mp3"
PREANNOUNCE_URL = f"/api/assist_satellite/static/{PREANNOUNCE_FILENAME}"


class AssistSatelliteEntityFeature(IntFlag):
    """Supported features of Assist satellite entity."""
@@ -23,12 +23,15 @@ from homeassistant.components.assist_pipeline import (
    vad,
)
from homeassistant.components.media_player import async_process_play_media_url
from homeassistant.components.tts import (
    generate_media_source_id as tts_generate_media_source_id,
)
from homeassistant.core import Context, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import chat_session, entity
from homeassistant.helpers.entity import EntityDescription

from .const import PREANNOUNCE_URL, AssistSatelliteEntityFeature
from .const import AssistSatelliteEntityFeature
from .errors import AssistSatelliteError, SatelliteBusyError

_LOGGER = logging.getLogger(__name__)
@@ -95,15 +98,9 @@ class AssistSatelliteAnnouncement:
    original_media_id: str
    """The raw media ID before processing."""

    tts_token: str | None
    """The TTS token of the media."""

    media_id_source: Literal["url", "media_id", "tts"]
    """Source of the media ID."""

    preannounce_media_id: str | None = None
    """Media ID to be played before announcement."""


class AssistSatelliteEntity(entity.Entity):
    """Entity encapsulating the state and functionality of an Assist satellite."""
@@ -180,8 +177,6 @@ class AssistSatelliteEntity(entity.Entity):
        self,
        message: str | None = None,
        media_id: str | None = None,
        preannounce: bool = True,
        preannounce_media_id: str = PREANNOUNCE_URL,
    ) -> None:
        """Play and show an announcement on the satellite.

@@ -191,9 +186,6 @@ class AssistSatelliteEntity(entity.Entity):
        If media_id is provided, it is played directly. It is possible
        to omit the message and the satellite will not show any text.

        If preannounce is True, a sound is played before the announcement.
        If preannounce_media_id is provided, it overrides the default sound.

        Calls async_announce with message and media id.
        """
        await self._cancel_running_pipeline()
@@ -201,11 +193,7 @@ class AssistSatelliteEntity(entity.Entity):
        if message is None:
            message = ""

        announcement = await self._resolve_announcement_media_id(
            message,
            media_id,
            preannounce_media_id=preannounce_media_id if preannounce else None,
        )
        announcement = await self._resolve_announcement_media_id(message, media_id)

        if self._is_announcing:
            raise SatelliteBusyError
@@ -232,8 +220,6 @@ class AssistSatelliteEntity(entity.Entity):
        start_message: str | None = None,
        start_media_id: str | None = None,
        extra_system_prompt: str | None = None,
        preannounce: bool = True,
        preannounce_media_id: str = PREANNOUNCE_URL,
    ) -> None:
        """Start a conversation from the satellite.

@@ -243,9 +229,6 @@ class AssistSatelliteEntity(entity.Entity):
        If start_media_id is provided, it is played directly. It is possible
        to omit the message and the satellite will not show any text.

        If preannounce is True, a sound is played before the start message or media.
        If preannounce_media_id is provided, it overrides the default sound.

        Calls async_start_conversation.
        """
        await self._cancel_running_pipeline()
@@ -261,17 +244,13 @@ class AssistSatelliteEntity(entity.Entity):
            start_message = ""

        announcement = await self._resolve_announcement_media_id(
            start_message,
            start_media_id,
            preannounce_media_id=preannounce_media_id if preannounce else None,
            start_message, start_media_id
        )

        if self._is_announcing:
            raise SatelliteBusyError

        self._is_announcing = True
        self._set_state(AssistSatelliteState.RESPONDING)

        # Provide our start info to the LLM so it understands context of incoming message
        if extra_system_prompt is not None:
            self._extra_system_prompt = extra_system_prompt
@@ -301,7 +280,6 @@ class AssistSatelliteEntity(entity.Entity):
            raise
        finally:
            self._is_announcing = False
            self._set_state(AssistSatelliteState.IDLE)

    async def async_start_conversation(
        self, start_announcement: AssistSatelliteAnnouncement
@@ -492,27 +470,20 @@ class AssistSatelliteEntity(entity.Entity):
        return vad.VadSensitivity.to_seconds(vad_sensitivity)

    async def _resolve_announcement_media_id(
        self,
        message: str,
        media_id: str | None,
        preannounce_media_id: str | None = None,
        self, message: str, media_id: str | None
    ) -> AssistSatelliteAnnouncement:
        """Resolve the media ID."""
        media_id_source: Literal["url", "media_id", "tts"] | None = None
        tts_token: str | None = None

        if media_id:
            original_media_id = media_id

        else:
            media_id_source = "tts"
            # Synthesize audio and get URL
            pipeline_id = self._resolve_pipeline()
            pipeline = async_get_pipeline(self.hass, pipeline_id)

            engine = tts.async_resolve_engine(self.hass, pipeline.tts_engine)
            if engine is None:
                raise HomeAssistantError(f"TTS engine {pipeline.tts_engine} not found")

            tts_options: dict[str, Any] = {}
            if pipeline.tts_voice is not None:
                tts_options[tts.ATTR_VOICE] = pipeline.tts_voice
@@ -520,23 +491,14 @@ class AssistSatelliteEntity(entity.Entity):
            if self.tts_options is not None:
                tts_options.update(self.tts_options)

            stream = tts.async_create_stream(
                self.hass,
                engine=engine,
                language=pipeline.tts_language,
                options=tts_options,
            )
            stream.async_set_message(message)

            tts_token = stream.token
            media_id = stream.url
            original_media_id = tts.generate_media_source_id(
            media_id = tts_generate_media_source_id(
                self.hass,
                message,
                engine=engine,
                engine=pipeline.tts_engine,
                language=pipeline.tts_language,
                options=tts_options,
            )
            original_media_id = media_id

        if media_source.is_media_source_id(media_id):
            if not media_id_source:
@@ -554,26 +516,6 @@ class AssistSatelliteEntity(entity.Entity):
            # Resolve to full URL
            media_id = async_process_play_media_url(self.hass, media_id)

        # Resolve preannounce media id
        if preannounce_media_id:
            if media_source.is_media_source_id(preannounce_media_id):
                preannounce_media = await media_source.async_resolve_media(
                    self.hass,
                    preannounce_media_id,
                    None,
                )
                preannounce_media_id = preannounce_media.url

            # Resolve to full URL
            preannounce_media_id = async_process_play_media_url(
                self.hass, preannounce_media_id
            )

        return AssistSatelliteAnnouncement(
            message=message,
            media_id=media_id,
            original_media_id=original_media_id,
            tts_token=tts_token,
            media_id_source=media_id_source,
            preannounce_media_id=preannounce_media_id,
            message, media_id, original_media_id, media_id_source
        )
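For orientation, the resolution logic above follows a fixed priority: an explicit media_id wins, otherwise the message is synthesized via TTS, and any media-source ID is then resolved to a playable URL. A minimal self-contained sketch of that order; the synthesize_tts and resolve_media_source helpers are hypothetical stand-ins, not Home Assistant APIs.

def synthesize_tts(message: str) -> str:
    """Hypothetical stand-in: return a media-source ID for synthesized speech."""
    return f"media-source://tts/demo?message={message}"

def resolve_media_source(media_id: str) -> str:
    """Hypothetical stand-in: resolve a media-source ID to a playable URL."""
    return media_id.replace("media-source://", "https://example.local/")

def resolve_announcement(message: str, media_id: str | None) -> tuple[str, str]:
    """Return (source, playable_id), mirroring the explicit-media-wins order."""
    if media_id:
        source = "media_id"
    else:
        source = "tts"
        media_id = synthesize_tts(message)
    if media_id.startswith("media-source://"):
        media_id = resolve_media_source(media_id)
    return source, media_id

print(resolve_announcement("Time to wake up!", None))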
Binary file not shown.
@@ -8,22 +8,12 @@ announce:
  message:
    required: false
    example: "Time to wake up!"
    default: ""
    selector:
      text:
  media_id:
    required: false
    selector:
      text:
  preannounce:
    required: false
    default: true
    selector:
      boolean:
  preannounce_media_id:
    required: false
    selector:
      text:
start_conversation:
  target:
    entity:
@@ -34,7 +24,6 @@ start_conversation:
  start_message:
    required: false
    example: "You left the lights on in the living room. Turn them off?"
    default: ""
    selector:
      text:
  start_media_id:
@@ -45,12 +34,3 @@ start_conversation:
    required: false
    selector:
      text:
  preannounce:
    required: false
    default: true
    selector:
      boolean:
  preannounce_media_id:
    required: false
    selector:
      text:
@@ -23,14 +23,6 @@
      "media_id": {
        "name": "Media ID",
        "description": "The media ID to announce instead of using text-to-speech."
      },
      "preannounce": {
        "name": "Preannounce",
        "description": "Play a sound before the announcement."
      },
      "preannounce_media_id": {
        "name": "Preannounce media ID",
        "description": "Custom media ID to play before the announcement."
      }
    }
  },
@@ -49,14 +41,6 @@
      "extra_system_prompt": {
        "name": "Extra system prompt",
        "description": "Provide background information to the AI about the request."
      },
      "preannounce": {
        "name": "Preannounce",
        "description": "Play a sound before the start message or media."
      },
      "preannounce_media_id": {
        "name": "Preannounce media ID",
        "description": "Custom media ID to play before the start message or media."
      }
    }
  }

@@ -198,8 +198,7 @@ async def websocket_test_connection(

    hass.async_create_background_task(
        satellite.async_internal_announce(
            media_id=f"{CONNECTION_TEST_URL_BASE}/{connection_id}",
            preannounce=False,
            media_id=f"{CONNECTION_TEST_URL_BASE}/{connection_id}"
        ),
        f"assist_satellite_connection_test_{msg['entity_id']}",
    )
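For context, the announce service whose fields appear in services.yaml above can be invoked from Python through the standard service API. A sketch only; the entity ID is hypothetical, and preannounce defaults to true so it is passed here purely to show the option being disabled.

# Sketch: calling assist_satellite.announce with the preannounce option.
await hass.services.async_call(
    "assist_satellite",
    "announce",
    {"message": "Time to wake up!", "preannounce": False},
    target={"entity_id": "assist_satellite.kitchen"},
    blocking=True,
)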
@@ -66,28 +66,28 @@
      "name": "Upload"
    },
    "load_avg_1m": {
      "name": "Average load (1 min)"
      "name": "Average load (1m)"
    },
    "load_avg_5m": {
      "name": "Average load (5 min)"
      "name": "Average load (5m)"
    },
    "load_avg_15m": {
      "name": "Average load (15 min)"
      "name": "Average load (15m)"
    },
    "24ghz_temperature": {
      "name": "2.4GHz temperature"
      "name": "2.4GHz Temperature"
    },
    "5ghz_temperature": {
      "name": "5GHz temperature"
      "name": "5GHz Temperature"
    },
    "cpu_temperature": {
      "name": "CPU temperature"
      "name": "CPU Temperature"
    },
    "5ghz_2_temperature": {
      "name": "5GHz temperature (Radio 2)"
      "name": "5GHz Temperature (Radio 2)"
    },
    "6ghz_temperature": {
      "name": "6GHz temperature"
      "name": "6GHz Temperature"
    },
    "cpu_usage": {
      "name": "CPU usage"

@@ -6,5 +6,5 @@
  "iot_class": "cloud_push",
  "loggers": ["aiobotocore", "botocore"],
  "quality_scale": "legacy",
  "requirements": ["aiobotocore==2.21.1", "botocore==1.37.1"]
  "requirements": ["aiobotocore==2.13.1", "botocore==1.34.131"]
}
@@ -14,7 +14,7 @@
        "personal_access_token": "Personal Access Token (PAT)"
      },
      "description": "Set up an Azure DevOps instance to access your project. A Personal Access Token is only required for a private project.",
      "title": "Add Azure DevOps project"
      "title": "Add Azure DevOps Project"
    },
    "reauth_confirm": {
      "data": {
@@ -32,7 +32,7 @@
  "entity": {
    "sensor": {
      "build_id": {
        "name": "{definition_name} latest build ID"
        "name": "{definition_name} latest build id"
      },
      "finish_time": {
        "name": "{definition_name} latest build finish time"
@@ -59,7 +59,7 @@
        "name": "{definition_name} latest build start time"
      },
      "url": {
        "name": "{definition_name} latest build URL"
        "name": "{definition_name} latest build url"
      },
      "work_item_count": {
        "name": "{item_type} {item_state} work items"
@@ -68,7 +68,7 @@
  },
  "exceptions": {
    "authentication_failed": {
      "message": "Could not authorize with Azure DevOps for {title}. You will need to update your Personal Access Token."
      "message": "Could not authorize with Azure DevOps for {title}. You will need to update your personal access token."
    }
  }
}
@@ -141,7 +141,7 @@ class AzureStorageBackupAgent(BackupAgent):
        """Delete a backup file."""
        blob = await self._find_blob_by_backup_id(backup_id)
        if blob is None:
            raise BackupNotFound(f"Backup {backup_id} not found")
            return
        await self._client.delete_blob(blob.name)

    @handle_backup_errors
@@ -163,11 +163,11 @@ class AzureStorageBackupAgent(BackupAgent):
        self,
        backup_id: str,
        **kwargs: Any,
    ) -> AgentBackup:
    ) -> AgentBackup | None:
        """Return a backup."""
        blob = await self._find_blob_by_backup_id(backup_id)
        if blob is None:
            raise BackupNotFound(f"Backup {backup_id} not found")
            return None

        return AgentBackup.from_dict(json.loads(blob.metadata["backup_metadata"]))

@@ -175,8 +175,7 @@ class AzureStorageBackupAgent(BackupAgent):
        """Find a blob by backup id."""
        async for blob in self._client.list_blobs(include="metadata"):
            if (
                blob.metadata is not None
                and backup_id == blob.metadata.get("backup_id", "")
                backup_id == blob.metadata.get("backup_id", "")
                and blob.metadata.get("metadata_version") == METADATA_VERSION
            ):
                return blob
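Note the added `blob.metadata is not None` guard in the hunk above: blobs uploaded without metadata would otherwise raise when `.get()` is called on None. A standalone sketch of the same scan; the blob dicts and the string version constant are hypothetical stand-ins for the Azure client objects.

METADATA_VERSION = "1"  # assumed value for illustration

def find_blob_by_backup_id(blobs, backup_id):
    """Return the first blob whose metadata matches the backup id and version."""
    for blob in blobs:
        metadata = blob.get("metadata")  # None for blobs uploaded without metadata
        if (
            metadata is not None
            and backup_id == metadata.get("backup_id", "")
            and metadata.get("metadata_version") == METADATA_VERSION
        ):
            return blob
    return None

# The first blob has no metadata and must be skipped, not crash the scan.
blobs = [
    {"name": "stray.bin", "metadata": None},
    {"name": "backup-1.tar", "metadata": {"backup_id": "abc123", "metadata_version": "1"}},
]
assert find_blob_by_backup_id(blobs, "abc123")["name"] == "backup-1.tar"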
@@ -1,9 +1,7 @@
"""The Backup integration."""

from homeassistant.config_entries import SOURCE_SYSTEM
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.helpers import config_validation as cv, discovery_flow
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.backup import DATA_BACKUP
from homeassistant.helpers.hassio import is_hassio
from homeassistant.helpers.typing import ConfigType
@@ -20,12 +18,10 @@ from .agent import (
)
from .config import BackupConfig, CreateBackupParametersDict
from .const import DATA_MANAGER, DOMAIN
from .coordinator import BackupConfigEntry, BackupDataUpdateCoordinator
from .http import async_register_http_views
from .manager import (
    BackupManager,
    BackupManagerError,
    BackupPlatformEvent,
    BackupPlatformProtocol,
    BackupReaderWriter,
    BackupReaderWriterError,
@@ -56,7 +52,6 @@ __all__ = [
    "BackupConfig",
    "BackupManagerError",
    "BackupNotFound",
    "BackupPlatformEvent",
    "BackupPlatformProtocol",
    "BackupReaderWriter",
    "BackupReaderWriterError",
@@ -79,8 +74,6 @@ __all__ = [
    "suggested_filename_from_name_date",
]

PLATFORMS = [Platform.SENSOR]

CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)


@@ -135,28 +128,4 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

    async_register_http_views(hass)

    discovery_flow.async_create_flow(
        hass, DOMAIN, context={"source": SOURCE_SYSTEM}, data={}
    )

    return True


async def async_setup_entry(hass: HomeAssistant, entry: BackupConfigEntry) -> bool:
    """Set up a config entry."""
    backup_manager: BackupManager = hass.data[DATA_MANAGER]
    coordinator = BackupDataUpdateCoordinator(hass, entry, backup_manager)
    await coordinator.async_config_entry_first_refresh()

    entry.async_on_unload(coordinator.async_unsubscribe)

    entry.runtime_data = coordinator

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

    return True


async def async_unload_entry(hass: HomeAssistant, entry: BackupConfigEntry) -> bool:
    """Unload a config entry."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
@@ -41,8 +41,6 @@ class BackupAgent(abc.ABC):
    ) -> AsyncIterator[bytes]:
        """Download a backup file.

        Raises BackupNotFound if the backup does not exist.

        :param backup_id: The ID of the backup that was returned in async_list_backups.
        :return: An async iterator that yields bytes.
        """
@@ -69,8 +67,6 @@ class BackupAgent(abc.ABC):
    ) -> None:
        """Delete a backup file.

        Raises BackupNotFound if the backup does not exist.

        :param backup_id: The ID of the backup that was returned in async_list_backups.
        """

@@ -83,11 +79,8 @@ class BackupAgent(abc.ABC):
        self,
        backup_id: str,
        **kwargs: Any,
    ) -> AgentBackup:
        """Return a backup.

        Raises BackupNotFound if the backup does not exist.
        """
    ) -> AgentBackup | None:
        """Return a backup."""


class LocalBackupAgent(BackupAgent):
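The hunks above capture the agent API change this branch revolves around: async_get_backup moves from returning AgentBackup | None to always returning AgentBackup and raising BackupNotFound on a miss. A minimal standalone sketch of the raise-based contract; the exception class and the dict-based store are stand-ins, not the real Home Assistant types.

class BackupNotFound(Exception):
    """Stand-in for the Home Assistant BackupNotFound model."""

class InMemoryAgent:
    """Sketch of the raise-based contract with an in-memory store."""

    def __init__(self) -> None:
        self._backups: dict[str, dict] = {}

    async def async_get_backup(self, backup_id: str) -> dict:
        """Return the backup or raise BackupNotFound; never return None."""
        try:
            return self._backups[backup_id]
        except KeyError as err:
            raise BackupNotFound(f"Backup {backup_id} not found") from err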
@@ -88,13 +88,13 @@ class CoreLocalBackupAgent(LocalBackupAgent):
        self,
        backup_id: str,
        **kwargs: Any,
    ) -> AgentBackup:
    ) -> AgentBackup | None:
        """Return a backup."""
        if not self._loaded_backups:
            await self._load_backups()

        if backup_id not in self._backups:
            raise BackupNotFound(f"Backup {backup_id} not found")
            return None

        backup, backup_path = self._backups[backup_id]
        if not await self._hass.async_add_executor_job(backup_path.exists):
@@ -107,7 +107,7 @@ class CoreLocalBackupAgent(LocalBackupAgent):
            backup_path,
        )
        self._backups.pop(backup_id)
        raise BackupNotFound(f"Backup {backup_id} not found")
        return None

        return backup

@@ -130,7 +130,10 @@ class CoreLocalBackupAgent(LocalBackupAgent):
        if not self._loaded_backups:
            await self._load_backups()

        backup_path = self.get_backup_path(backup_id)
        try:
            backup_path = self.get_backup_path(backup_id)
        except BackupNotFound:
            return
        await self._hass.async_add_executor_job(backup_path.unlink, True)
        LOGGER.debug("Deleted backup located at %s", backup_path)
        self._backups.pop(backup_id)
@@ -1,21 +0,0 @@
"""Config flow for Home Assistant Backup integration."""

from __future__ import annotations

from typing import Any

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult

from .const import DOMAIN


class BackupConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Home Assistant Backup."""

    VERSION = 1

    async def async_step_system(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step."""
        return self.async_create_entry(title="Backup", data={})
@@ -16,8 +16,8 @@ DATA_MANAGER: HassKey[BackupManager] = HassKey(DOMAIN)
LOGGER = getLogger(__package__)

EXCLUDE_FROM_BACKUP = [
    "**/__pycache__/*",
    "**/.DS_Store",
    "__pycache__/*",
    ".DS_Store",
    ".HA_RESTORE",
    "*.db-shm",
    "*.log.*",
@@ -1,81 +0,0 @@
"""Coordinator for Home Assistant Backup integration."""

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.backup import (
    async_subscribe_events,
    async_subscribe_platform_events,
)
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

from .const import DOMAIN, LOGGER
from .manager import (
    BackupManager,
    BackupManagerState,
    BackupPlatformEvent,
    ManagerStateEvent,
)

type BackupConfigEntry = ConfigEntry[BackupDataUpdateCoordinator]


@dataclass
class BackupCoordinatorData:
    """Class to hold backup data."""

    backup_manager_state: BackupManagerState
    last_successful_automatic_backup: datetime | None
    next_scheduled_automatic_backup: datetime | None


class BackupDataUpdateCoordinator(DataUpdateCoordinator[BackupCoordinatorData]):
    """Class to retrieve backup status."""

    config_entry: ConfigEntry

    def __init__(
        self,
        hass: HomeAssistant,
        config_entry: ConfigEntry,
        backup_manager: BackupManager,
    ) -> None:
        """Initialize coordinator."""
        super().__init__(
            hass,
            LOGGER,
            config_entry=config_entry,
            name=DOMAIN,
            update_interval=None,
        )
        self.unsubscribe: list[Callable[[], None]] = [
            async_subscribe_events(hass, self._on_event),
            async_subscribe_platform_events(hass, self._on_event),
        ]

        self.backup_manager = backup_manager

    @callback
    def _on_event(self, event: ManagerStateEvent | BackupPlatformEvent) -> None:
        """Handle new event."""
        LOGGER.debug("Received backup event: %s", event)
        self.config_entry.async_create_task(self.hass, self.async_refresh())

    async def _async_update_data(self) -> BackupCoordinatorData:
        """Update backup manager data."""
        return BackupCoordinatorData(
            self.backup_manager.state,
            self.backup_manager.config.data.last_completed_automatic_backup,
            self.backup_manager.config.data.schedule.next_automatic_backup,
        )

    @callback
    def async_unsubscribe(self) -> None:
        """Unsubscribe from events."""
        for unsub in self.unsubscribe:
            unsub()
@@ -1,27 +0,0 @@
"""Diagnostics support for Home Assistant Backup integration."""

from __future__ import annotations

from typing import Any

from homeassistant.components.diagnostics import async_redact_data
from homeassistant.const import CONF_PASSWORD
from homeassistant.core import HomeAssistant

from .coordinator import BackupConfigEntry


async def async_get_config_entry_diagnostics(
    hass: HomeAssistant, entry: BackupConfigEntry
) -> dict[str, Any]:
    """Return diagnostics for a config entry."""
    coordinator = entry.runtime_data
    return {
        "backup_agents": [
            {"name": agent.name, "agent_id": agent.agent_id}
            for agent in coordinator.backup_manager.backup_agents.values()
        ],
        "backup_config": async_redact_data(
            coordinator.backup_manager.config.data.to_dict(), [CONF_PASSWORD]
        ),
    }
@@ -1,36 +0,0 @@
"""Base for backup entities."""

from __future__ import annotations

from homeassistant.const import __version__ as HA_VERSION
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .coordinator import BackupDataUpdateCoordinator


class BackupManagerEntity(CoordinatorEntity[BackupDataUpdateCoordinator]):
    """Base entity for backup manager."""

    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: BackupDataUpdateCoordinator,
        entity_description: EntityDescription,
    ) -> None:
        """Initialize base entity."""
        super().__init__(coordinator)
        self.entity_description = entity_description
        self._attr_unique_id = entity_description.key
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, "backup_manager")},
            manufacturer="Home Assistant",
            model="Home Assistant Backup",
            sw_version=HA_VERSION,
            name="Backup",
            entry_type=DeviceEntryType.SERVICE,
            configuration_url="homeassistant://config/backup",
        )
@@ -15,7 +15,6 @@ from multidict import istr
from homeassistant.components.http import KEY_HASS, HomeAssistantView, require_admin
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import frame
from homeassistant.util import slugify

from . import util
@@ -60,19 +59,11 @@ class DownloadBackupView(HomeAssistantView):
        if agent_id not in manager.backup_agents:
            return Response(status=HTTPStatus.BAD_REQUEST)
        agent = manager.backup_agents[agent_id]
        try:
            backup = await agent.async_get_backup(backup_id)
        except BackupNotFound:
            return Response(status=HTTPStatus.NOT_FOUND)
        backup = await agent.async_get_backup(backup_id)

        # Check for None to be backwards compatible with the old BackupAgent API,
        # this can be removed in HA Core 2025.10
        if not backup:
            frame.report_usage(
                "returns None from BackupAgent.async_get_backup",
                breaks_in_ha_version="2025.10",
                integration_domain=agent_id.partition(".")[0],
            )
        # We don't need to check if the path exists, aiohttp.FileResponse will handle
        # that
        if backup is None:
            return Response(status=HTTPStatus.NOT_FOUND)

        headers = {
@@ -101,8 +92,6 @@
    ) -> StreamResponse | FileResponse | Response:
        if agent_id in manager.local_backup_agents:
            local_agent = manager.local_backup_agents[agent_id]
            # We don't need to check if the path exists, aiohttp.FileResponse will
            # handle that
            path = local_agent.get_backup_path(backup_id)
            return FileResponse(path=path.as_posix(), headers=headers)

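The repeated frame.report_usage blocks in these hunks implement a transition shim: old-style agents that still return None keep working, but a deprecation warning is attributed to the offending integration until the compatibility path is dropped. A generic sketch of that shim; report_usage here is a hypothetical logger standing in for homeassistant.helpers.frame.report_usage.

import logging

LOGGER = logging.getLogger(__name__)

def report_usage(what: str, breaks_in: str, domain: str) -> None:
    """Hypothetical stand-in for frame.report_usage."""
    LOGGER.warning("%s is deprecated for %s and breaks in %s", what, domain, breaks_in)

async def get_backup_compat(agent, backup_id: str):
    """Accept both contracts: None-returning (old) and raising (new) agents."""
    backup = await agent.async_get_backup(backup_id)
    if backup is None:
        # Old-style agent: warn with attribution, then treat as not found.
        report_usage(
            "returns None from BackupAgent.async_get_backup",
            breaks_in="2025.10",
            domain=getattr(agent, "domain", "unknown"),
        )
    return backup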
@@ -30,7 +30,6 @@ from homeassistant.backup_restore import (
from homeassistant.const import __version__ as HAVERSION
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import (
    frame,
    instance_id,
    integration_platform,
    issue_registry as ir,
@@ -65,7 +64,6 @@ from .models import (
    AgentBackup,
    BackupError,
    BackupManagerError,
    BackupNotFound,
    BackupReaderWriterError,
    BaseBackup,
    Folder,
@@ -120,7 +118,6 @@ class BackupManagerState(StrEnum):

    IDLE = "idle"
    CREATE_BACKUP = "create_backup"
    BLOCKED = "blocked"
    RECEIVE_BACKUP = "receive_backup"
    RESTORE_BACKUP = "restore_backup"

@@ -229,20 +226,6 @@ class RestoreBackupEvent(ManagerStateEvent):
    state: RestoreBackupState


@dataclass(frozen=True, kw_only=True, slots=True)
class BackupPlatformEvent:
    """Backup platform class."""

    domain: str


@dataclass(frozen=True, kw_only=True, slots=True)
class BlockedEvent(ManagerStateEvent):
    """Backup manager blocked, Home Assistant is starting."""

    manager_state: BackupManagerState = BackupManagerState.BLOCKED


class BackupPlatformProtocol(Protocol):
    """Define the format that backup platforms can have."""

@@ -357,14 +340,11 @@ class BackupManager:
        self.remove_next_delete_event: Callable[[], None] | None = None

        # Latest backup event and backup event subscribers
        self.last_event: ManagerStateEvent = BlockedEvent()
        self.last_action_event: ManagerStateEvent | None = None
        self.last_event: ManagerStateEvent = IdleEvent()
        self.last_non_idle_event: ManagerStateEvent | None = None
        self._backup_event_subscriptions = hass.data[
            DATA_BACKUP
        ].backup_event_subscriptions
        self._backup_platform_event_subscriptions = hass.data[
            DATA_BACKUP
        ].backup_platform_event_subscriptions

    async def async_setup(self) -> None:
        """Set up the backup manager."""
@@ -374,19 +354,10 @@ class BackupManager:
        self.known_backups.load(stored["backups"])

        await self._reader_writer.async_validate_config(config=self.config)

        await self._reader_writer.async_resume_restore_progress_after_restart(
            on_progress=self.async_on_backup_event
        )

        async def set_manager_idle_after_start(hass: HomeAssistant) -> None:
            """Set manager to idle after start."""
            self.async_on_backup_event(IdleEvent())

        if self.state == BackupManagerState.BLOCKED:
            # If we're not finishing a restore job, set the manager to idle after start
            start.async_at_started(self.hass, set_manager_idle_after_start)

        await self.load_platforms()

    @property
@@ -475,9 +446,6 @@ class BackupManager:
        LOGGER.debug("%s platforms loaded in total", len(self.platforms))
        LOGGER.debug("%s agents loaded in total", len(self.backup_agents))
        LOGGER.debug("%s local agents loaded in total", len(self.local_backup_agents))
        event = BackupPlatformEvent(domain=integration_domain)
        for subscription in self._backup_platform_event_subscriptions:
            subscription(event)

    async def async_pre_backup_actions(self) -> None:
        """Perform pre backup actions."""
@@ -680,8 +648,6 @@ class BackupManager:
        )
        for idx, result in enumerate(get_backup_results):
            agent_id = agent_ids[idx]
            if isinstance(result, BackupNotFound):
                continue
            if isinstance(result, BackupAgentError):
                agent_errors[agent_id] = result
                continue
@@ -693,14 +659,7 @@ class BackupManager:
                continue
            if isinstance(result, BaseException):
                raise result  # unexpected error
            # Check for None to be backwards compatible with the old BackupAgent API,
            # this can be removed in HA Core 2025.10
            if not result:
                frame.report_usage(
                    "returns None from BackupAgent.async_get_backup",
                    breaks_in_ha_version="2025.10",
                    integration_domain=agent_id.partition(".")[0],
                )
                continue
            if backup is None:
                if known_backup := self.known_backups.get(backup_id):
@@ -764,8 +723,6 @@ class BackupManager:
        )
        for idx, result in enumerate(delete_backup_results):
            agent_id = agent_ids[idx]
            if isinstance(result, BackupNotFound):
                continue
            if isinstance(result, BackupAgentError):
                agent_errors[agent_id] = result
                continue
@@ -875,7 +832,7 @@ class BackupManager:
        agent_errors = {
            backup_id: error
            for backup_id, error in zip(backup_ids, delete_results, strict=True)
            if error and not isinstance(error, BackupNotFound)
            if error
        }
        if agent_errors:
            LOGGER.error(
@@ -1307,20 +1264,7 @@ class BackupManager:
    ) -> None:
        """Initiate restoring a backup."""
        agent = self.backup_agents[agent_id]
        try:
            backup = await agent.async_get_backup(backup_id)
        except BackupNotFound as err:
            raise BackupManagerError(
                f"Backup {backup_id} not found in agent {agent_id}"
            ) from err
        # Check for None to be backwards compatible with the old BackupAgent API,
        # this can be removed in HA Core 2025.10
        if not backup:
            frame.report_usage(
                "returns None from BackupAgent.async_get_backup",
                breaks_in_ha_version="2025.10",
                integration_domain=agent_id.partition(".")[0],
            )
        if not await agent.async_get_backup(backup_id):
            raise BackupManagerError(
                f"Backup {backup_id} not found in agent {agent_id}"
            )
@@ -1349,8 +1293,8 @@ class BackupManager:
        if (current_state := self.state) != (new_state := event.manager_state):
            LOGGER.debug("Backup state: %s -> %s", current_state, new_state)
        self.last_event = event
        if not isinstance(event, (BlockedEvent, IdleEvent)):
            self.last_action_event = event
        if not isinstance(event, IdleEvent):
            self.last_non_idle_event = event
        for subscription in self._backup_event_subscriptions:
            subscription(event)

@@ -1408,20 +1352,7 @@ class BackupManager:
            agent = self.backup_agents[agent_id]
        except KeyError as err:
            raise BackupManagerError(f"Invalid agent selected: {agent_id}") from err
        try:
            backup = await agent.async_get_backup(backup_id)
        except BackupNotFound as err:
            raise BackupManagerError(
                f"Backup {backup_id} not found in agent {agent_id}"
            ) from err
        # Check for None to be backwards compatible with the old BackupAgent API,
        # this can be removed in HA Core 2025.10
        if not backup:
            frame.report_usage(
                "returns None from BackupAgent.async_get_backup",
                breaks_in_ha_version="2025.10",
                integration_domain=agent_id.partition(".")[0],
            )
        if not await agent.async_get_backup(backup_id):
            raise BackupManagerError(
                f"Backup {backup_id} not found in agent {agent_id}"
            )
@@ -1726,9 +1657,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
        """Filter to filter excludes."""

        for exclude in excludes:
            # The Home Assistant core configuration directory is added as "data"
            # in the tar file, so we need to prefix that path to the filters.
            if not path.full_match(f"data/{exclude}"):
            if not path.match(exclude):
                continue
            LOGGER.debug("Ignoring %s because of %s", path, exclude)
            return True
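For context on the glob change in the last hunk: PurePath.match compares patterns from the right, so an unanchored exclude can match anywhere in the tree, while PurePath.full_match (Python 3.13+) must cover the entire path, which is why the excludes are prefixed with the "data/" archive root. A small illustration:

from pathlib import PurePosixPath

p = PurePosixPath("data/config/home-assistant.log.1")

# match() compares from the right, so an unanchored pattern can hit anywhere:
print(p.match("*.log.*"))               # True

# full_match() must span the whole path, hence the "data/" prefix above:
print(p.full_match("*.log.*"))          # False: does not cover "data/config/"
print(p.full_match("data/**/*.log.*"))  # True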
@@ -5,9 +5,8 @@
  "codeowners": ["@home-assistant/core"],
  "dependencies": ["http", "websocket_api"],
  "documentation": "https://www.home-assistant.io/integrations/backup",
  "integration_type": "service",
  "integration_type": "system",
  "iot_class": "calculated",
  "quality_scale": "internal",
  "requirements": ["cronsim==2.6", "securetar==2025.2.1"],
  "single_config_entry": true
  "requirements": ["cronsim==2.6", "securetar==2025.2.1"]
}

@@ -1,136 +0,0 @@
"""Backup onboarding views."""

from __future__ import annotations

from collections.abc import Callable, Coroutine
from functools import wraps
from http import HTTPStatus
from typing import TYPE_CHECKING, Any, Concatenate

from aiohttp import web
from aiohttp.web_exceptions import HTTPUnauthorized
import voluptuous as vol

from homeassistant.components.http import KEY_HASS
from homeassistant.components.http.data_validator import RequestDataValidator
from homeassistant.components.onboarding import (
    BaseOnboardingView,
    NoAuthBaseOnboardingView,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.backup import async_get_manager as async_get_backup_manager

from . import BackupManager, Folder, IncorrectPasswordError, http as backup_http

if TYPE_CHECKING:
    from homeassistant.components.onboarding import OnboardingStoreData


async def async_setup_views(hass: HomeAssistant, data: OnboardingStoreData) -> None:
    """Set up the backup views."""

    hass.http.register_view(BackupInfoView(data))
    hass.http.register_view(RestoreBackupView(data))
    hass.http.register_view(UploadBackupView(data))


def with_backup_manager[_ViewT: BaseOnboardingView, **_P](
    func: Callable[
        Concatenate[_ViewT, BackupManager, web.Request, _P],
        Coroutine[Any, Any, web.Response],
    ],
) -> Callable[Concatenate[_ViewT, web.Request, _P], Coroutine[Any, Any, web.Response]]:
    """Home Assistant API decorator to check onboarding and inject manager."""

    @wraps(func)
    async def with_backup(
        self: _ViewT,
        request: web.Request,
        *args: _P.args,
        **kwargs: _P.kwargs,
    ) -> web.Response:
        """Check admin and call function."""
        if self._data["done"]:
            raise HTTPUnauthorized

        manager = await async_get_backup_manager(request.app[KEY_HASS])
        return await func(self, manager, request, *args, **kwargs)

    return with_backup


class BackupInfoView(NoAuthBaseOnboardingView):
    """Get backup info view."""

    url = "/api/onboarding/backup/info"
    name = "api:onboarding:backup:info"

    @with_backup_manager
    async def get(self, manager: BackupManager, request: web.Request) -> web.Response:
        """Return backup info."""
        backups, _ = await manager.async_get_backups()
        return self.json(
            {
                "backups": list(backups.values()),
                "state": manager.state,
                "last_action_event": manager.last_action_event,
            }
        )


class RestoreBackupView(NoAuthBaseOnboardingView):
    """Restore backup view."""

    url = "/api/onboarding/backup/restore"
    name = "api:onboarding:backup:restore"

    @RequestDataValidator(
        vol.Schema(
            {
                vol.Required("backup_id"): str,
                vol.Required("agent_id"): str,
                vol.Optional("password"): str,
                vol.Optional("restore_addons"): [str],
                vol.Optional("restore_database", default=True): bool,
                vol.Optional("restore_folders"): [vol.Coerce(Folder)],
            }
        )
    )
    @with_backup_manager
    async def post(
        self, manager: BackupManager, request: web.Request, data: dict[str, Any]
    ) -> web.Response:
        """Restore a backup."""
        try:
            await manager.async_restore_backup(
                data["backup_id"],
                agent_id=data["agent_id"],
                password=data.get("password"),
                restore_addons=data.get("restore_addons"),
                restore_database=data["restore_database"],
                restore_folders=data.get("restore_folders"),
                restore_homeassistant=True,
            )
        except IncorrectPasswordError:
            return self.json(
                {"code": "incorrect_password"}, status_code=HTTPStatus.BAD_REQUEST
            )
        except HomeAssistantError as err:
            return self.json(
                {"code": "restore_failed", "message": str(err)},
                status_code=HTTPStatus.BAD_REQUEST,
            )
        return web.Response(status=HTTPStatus.OK)


class UploadBackupView(NoAuthBaseOnboardingView, backup_http.UploadBackupView):
    """Upload backup view."""

    url = "/api/onboarding/backup/upload"
    name = "api:onboarding:backup:upload"

    @with_backup_manager
    async def post(self, manager: BackupManager, request: web.Request) -> web.Response:
        """Upload a backup file."""
        return await self._post(request)
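The deleted `with_backup_manager` decorator above combines PEP 695 type parameters with `Concatenate` so the wrapper can inject a `BackupManager` argument while the rest of the view signature passes through fully typed. A standalone sketch of the same pattern with toy names (`Service`, `get_service`, and `with_service` are all hypothetical):

```python
# Standalone sketch of the argument-injecting decorator pattern used by
# with_backup_manager above; Service and get_service are toy stand-ins.
import asyncio
from collections.abc import Callable, Coroutine
from functools import wraps
from typing import Any, Concatenate


class Service:
    """Toy dependency to inject."""


async def get_service() -> Service:
    return Service()


def with_service[**P, R](
    func: Callable[Concatenate[Service, P], Coroutine[Any, Any, R]],
) -> Callable[P, Coroutine[Any, Any, R]]:
    """Resolve the dependency and prepend it to the wrapped call."""

    @wraps(func)
    async def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
        service = await get_service()
        return await func(service, *args, **kwargs)

    return wrapper


@with_service
async def handle(service: Service, name: str) -> str:
    return f"{type(service).__name__} handled {name}"


print(asyncio.run(handle("backup")))  # Service handled backup
```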
@@ -1,75 +0,0 @@
"""Sensor platform for Home Assistant Backup integration."""

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime

from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntity,
    SensorEntityDescription,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import BackupConfigEntry, BackupCoordinatorData
from .entity import BackupManagerEntity
from .manager import BackupManagerState


@dataclass(kw_only=True, frozen=True)
class BackupSensorEntityDescription(SensorEntityDescription):
    """Description for Home Assistant Backup sensor entities."""

    value_fn: Callable[[BackupCoordinatorData], str | datetime | None]


BACKUP_MANAGER_DESCRIPTIONS = (
    BackupSensorEntityDescription(
        key="backup_manager_state",
        translation_key="backup_manager_state",
        device_class=SensorDeviceClass.ENUM,
        options=[state.value for state in BackupManagerState],
        value_fn=lambda data: data.backup_manager_state,
    ),
    BackupSensorEntityDescription(
        key="next_scheduled_automatic_backup",
        translation_key="next_scheduled_automatic_backup",
        device_class=SensorDeviceClass.TIMESTAMP,
        value_fn=lambda data: data.next_scheduled_automatic_backup,
    ),
    BackupSensorEntityDescription(
        key="last_successful_automatic_backup",
        translation_key="last_successful_automatic_backup",
        device_class=SensorDeviceClass.TIMESTAMP,
        value_fn=lambda data: data.last_successful_automatic_backup,
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: BackupConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Sensor set up for backup config entry."""

    coordinator = config_entry.runtime_data

    async_add_entities(
        BackupManagerSensor(coordinator, description)
        for description in BACKUP_MANAGER_DESCRIPTIONS
    )


class BackupManagerSensor(BackupManagerEntity, SensorEntity):
    """Sensor to track backup manager state."""

    entity_description: BackupSensorEntityDescription

    @property
    def native_value(self) -> str | datetime | None:
        """Return native value of entity."""
        return self.entity_description.value_fn(self.coordinator.data)
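The deleted sensor platform above is a description-driven entity setup: each `BackupSensorEntityDescription` carries a `value_fn`, and one generic entity class dispatches through it, so adding a sensor means adding a description rather than a class. A minimal sketch of that dispatch outside Home Assistant (`CoordinatorData` and `SensorDescription` are simplified stand-ins):

```python
# Minimal sketch of the description-driven value_fn dispatch used by the
# deleted sensor platform; the types here are simplified stand-ins.
from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime, timezone


@dataclass(frozen=True, kw_only=True)
class CoordinatorData:
    backup_manager_state: str
    last_successful_automatic_backup: datetime | None


@dataclass(frozen=True, kw_only=True)
class SensorDescription:
    key: str
    value_fn: Callable[[CoordinatorData], str | datetime | None]


DESCRIPTIONS = (
    SensorDescription(
        key="backup_manager_state",
        value_fn=lambda data: data.backup_manager_state,
    ),
    SensorDescription(
        key="last_successful_automatic_backup",
        value_fn=lambda data: data.last_successful_automatic_backup,
    ),
)

data = CoordinatorData(
    backup_manager_state="idle",
    last_successful_automatic_backup=datetime.now(timezone.utc),
)
for description in DESCRIPTIONS:
    print(description.key, "->", description.value_fn(data))
```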
@@ -22,24 +22,5 @@
"name": "Create automatic backup",
"description": "Creates a new backup with automatic backup settings."
}
},
"entity": {
"sensor": {
"backup_manager_state": {
"name": "Backup Manager state",
"state": {
"idle": "[%key:common::state::idle%]",
"create_backup": "Creating a backup",
"receive_backup": "Receiving a backup",
"restore_backup": "Restoring a backup"
}
},
"next_scheduled_automatic_backup": {
"name": "Next scheduled automatic backup"
},
"last_successful_automatic_backup": {
"name": "Last successful automatic backup"
}
}
}
}
Some files were not shown because too many files have changed in this diff.