Mirror of https://github.com/home-assistant/core.git (synced 2025-07-23 05:07:41 +00:00)
Merge branch 'dev' into ingress_dropping_close
Commit 582761a6ec

.github/workflows/builder.yml (26 changes, vendored)
@@ -32,7 +32,7 @@ jobs:
           fetch-depth: 0

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
@@ -69,7 +69,7 @@ jobs:
         run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -

       - name: Upload translations
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: translations
           path: translations.tar.gz
@@ -116,7 +116,7 @@ jobs:

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         if: needs.init.outputs.channel == 'dev'
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
@@ -175,7 +175,7 @@ jobs:
           sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt

       - name: Download translations
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.2.1
         with:
           name: translations
@@ -190,14 +190,14 @@ jobs:
           echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE

       - name: Login to GitHub Container Registry
-        uses: docker/login-action@v3.3.0
+        uses: docker/login-action@v3.4.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}

       - name: Build base image
-        uses: home-assistant/builder@2025.02.0
+        uses: home-assistant/builder@2025.03.0
         with:
           args: |
             $BUILD_ARGS \
@@ -256,14 +256,14 @@ jobs:
           fi

       - name: Login to GitHub Container Registry
-        uses: docker/login-action@v3.3.0
+        uses: docker/login-action@v3.4.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}

       - name: Build base image
-        uses: home-assistant/builder@2025.02.0
+        uses: home-assistant/builder@2025.03.0
         with:
           args: |
             $BUILD_ARGS \
@@ -330,14 +330,14 @@ jobs:

       - name: Login to DockerHub
         if: matrix.registry == 'docker.io/homeassistant'
-        uses: docker/login-action@v3.3.0
+        uses: docker/login-action@v3.4.0
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

       - name: Login to GitHub Container Registry
         if: matrix.registry == 'ghcr.io/home-assistant'
-        uses: docker/login-action@v3.3.0
+        uses: docker/login-action@v3.4.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
@@ -457,12 +457,12 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}

       - name: Download translations
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.2.1
         with:
           name: translations
@@ -502,7 +502,7 @@ jobs:
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

       - name: Login to GitHub Container Registry
-        uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
+        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
.github/workflows/ci.yaml (118 changes, vendored)
@@ -37,10 +37,10 @@ on:
         type: boolean

 env:
-  CACHE_VERSION: 11
+  CACHE_VERSION: 12
   UV_CACHE_VERSION: 1
   MYPY_CACHE_VERSION: 9
-  HA_SHORT_VERSION: "2025.4"
+  HA_SHORT_VERSION: "2025.5"
   DEFAULT_PYTHON: "3.13"
   ALL_PYTHON_VERSIONS: "['3.13']"
   # 10.3 is the oldest supported version
@@ -249,13 +249,13 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.2
+        uses: actions/cache@v4.2.3
         with:
           path: venv
           key: >-
@@ -271,7 +271,7 @@ jobs:
           uv pip install "$(cat requirements_test.txt | grep pre-commit)"
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v4.2.2
+        uses: actions/cache@v4.2.3
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           lookup-only: true
@@ -294,14 +294,14 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
@@ -310,7 +310,7 @@ jobs:
           needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true
@@ -334,14 +334,14 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
@@ -350,7 +350,7 @@ jobs:
           needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true
@@ -374,14 +374,14 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
@@ -390,7 +390,7 @@ jobs:
           needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true
@@ -484,7 +484,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
@@ -497,7 +497,7 @@ jobs:
           env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.2
+        uses: actions/cache@v4.2.3
         with:
           path: venv
           key: >-
@@ -505,7 +505,7 @@ jobs:
           needs.info.outputs.python_cache_key }}
       - name: Restore uv wheel cache
         if: steps.cache-venv.outputs.cache-hit != 'true'
-        uses: actions/cache@v4.2.2
+        uses: actions/cache@v4.2.3
         with:
           path: ${{ env.UV_CACHE_DIR }}
           key: >-
@@ -552,7 +552,7 @@ jobs:
           python --version
           uv pip freeze >> pip_freeze.txt
       - name: Upload pip_freeze artifact
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: pip-freeze-${{ matrix.python-version }}
           path: pip_freeze.txt
@@ -587,13 +587,13 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
@@ -620,13 +620,13 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
@@ -677,13 +677,13 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
@@ -695,7 +695,7 @@ jobs:
           . venv/bin/activate
           python -m script.licenses extract --output-file=licenses-${{ matrix.python-version }}.json
       - name: Upload licenses
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: licenses-${{ github.run_number }}-${{ matrix.python-version }}
           path: licenses-${{ matrix.python-version }}.json
@@ -720,13 +720,13 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
@@ -767,13 +767,13 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
@@ -812,7 +812,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
@@ -825,7 +825,7 @@ jobs:
           env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
@@ -833,7 +833,7 @@ jobs:
           ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
           needs.info.outputs.python_cache_key }}
       - name: Restore mypy cache
-        uses: actions/cache@v4.2.2
+        uses: actions/cache@v4.2.3
         with:
           path: .mypy_cache
           key: >-
@@ -889,13 +889,13 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
@@ -907,7 +907,7 @@ jobs:
           . venv/bin/activate
           python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests
       - name: Upload pytest_buckets
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: pytest_buckets
           path: pytest_buckets.txt
@@ -949,13 +949,13 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
@@ -968,7 +968,7 @@ jobs:
         run: |
           echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
       - name: Download pytest_buckets
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.2.1
         with:
           name: pytest_buckets
       - name: Compile English translations
@@ -1007,21 +1007,21 @@ jobs:
           2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-full.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
           path: pytest-*.txt
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
           path: coverage.xml
           overwrite: true
       - name: Upload test results artifact
         if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: test-results-full-${{ matrix.python-version }}-${{ matrix.group }}
           path: junit.xml
@@ -1074,13 +1074,13 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
@@ -1138,7 +1138,7 @@ jobs:
           2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.mariadb }}
@@ -1146,7 +1146,7 @@ jobs:
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: coverage-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.mariadb }}
@@ -1154,7 +1154,7 @@ jobs:
           overwrite: true
       - name: Upload test results artifact
         if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: test-results-mariadb-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.mariadb }}
@@ -1208,13 +1208,13 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
@@ -1273,7 +1273,7 @@ jobs:
           2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.postgresql }}
@@ -1281,7 +1281,7 @@ jobs:
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: coverage-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.postgresql }}
@@ -1289,7 +1289,7 @@ jobs:
           overwrite: true
       - name: Upload test results artifact
         if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: test-results-postgres-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.postgresql }}
@@ -1312,7 +1312,7 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.2.1
         with:
           pattern: coverage-*
       - name: Upload coverage to Codecov
@@ -1359,13 +1359,13 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
@@ -1420,21 +1420,21 @@ jobs:
           2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
           path: pytest-*.txt
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
           path: coverage.xml
           overwrite: true
       - name: Upload test results artifact
         if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: test-results-partial-${{ matrix.python-version }}-${{ matrix.group }}
           path: junit.xml
@@ -1454,7 +1454,7 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.2.1
         with:
           pattern: coverage-*
       - name: Upload coverage to Codecov
@@ -1479,7 +1479,7 @@ jobs:
     timeout-minutes: 10
     steps:
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.2.1
         with:
           pattern: test-results-*
       - name: Upload test results to Codecov
.github/workflows/codeql.yml (4 changes, vendored)
@@ -24,11 +24,11 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3.28.10
+        uses: github/codeql-action/init@v3.28.13
         with:
           languages: python

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3.28.10
+        uses: github/codeql-action/analyze@v3.28.13
         with:
           category: "/language:python"
.github/workflows/translations.yml (2 changes, vendored)
@@ -22,7 +22,7 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
.github/workflows/wheels.yml (28 changes, vendored)
@@ -36,7 +36,7 @@ jobs:

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
@@ -91,7 +91,7 @@ jobs:
           ) > build_constraints.txt

       - name: Upload env_file
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: env_file
           path: ./.env_file
@@ -99,14 +99,14 @@ jobs:
           overwrite: true

       - name: Upload build_constraints
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: build_constraints
           path: ./build_constraints.txt
           overwrite: true

       - name: Upload requirements_diff
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: requirements_diff
           path: ./requirements_diff.txt
@@ -118,7 +118,7 @@ jobs:
           python -m script.gen_requirements_all ci

       - name: Upload requirements_all_wheels
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: requirements_all_wheels
           path: ./requirements_all_wheels_*.txt
@@ -138,17 +138,17 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Download env_file
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.2.1
         with:
           name: env_file

       - name: Download build_constraints
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.2.1
         with:
           name: build_constraints

       - name: Download requirements_diff
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.2.1
         with:
           name: requirements_diff

@@ -159,7 +159,7 @@ jobs:
           sed -i "/uv/d" requirements_diff.txt

       - name: Build wheels
-        uses: home-assistant/wheels@2025.02.0
+        uses: home-assistant/wheels@2025.03.0
         with:
           abi: ${{ matrix.abi }}
           tag: musllinux_1_2
@@ -187,22 +187,22 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Download env_file
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.2.1
         with:
           name: env_file

       - name: Download build_constraints
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.2.1
         with:
           name: build_constraints

       - name: Download requirements_diff
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.2.1
         with:
           name: requirements_diff

       - name: Download requirements_all_wheels
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.2.1
         with:
           name: requirements_all_wheels

@@ -219,7 +219,7 @@ jobs:
           sed -i "/uv/d" requirements_diff.txt

       - name: Build wheels
-        uses: home-assistant/wheels@2025.02.0
+        uses: home-assistant/wheels@2025.03.0
         with:
           abi: ${{ matrix.abi }}
           tag: musllinux_1_2
.pre-commit-config.yaml
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.9.10
+    rev: v0.11.0
     hooks:
       - id: ruff
         args:
.strict-typing
@@ -119,6 +119,7 @@ homeassistant.components.bluetooth_adapters.*
 homeassistant.components.bluetooth_tracker.*
 homeassistant.components.bmw_connected_drive.*
 homeassistant.components.bond.*
+homeassistant.components.bosch_alarm.*
 homeassistant.components.braviatv.*
 homeassistant.components.bring.*
 homeassistant.components.brother.*
@@ -412,6 +413,7 @@ homeassistant.components.recollect_waste.*
 homeassistant.components.recorder.*
 homeassistant.components.remember_the_milk.*
 homeassistant.components.remote.*
+homeassistant.components.remote_calendar.*
 homeassistant.components.renault.*
 homeassistant.components.reolink.*
 homeassistant.components.repairs.*
.vscode/tasks.json (2 changes, vendored)
@@ -4,7 +4,7 @@
     {
       "label": "Run Home Assistant Core",
       "type": "shell",
-      "command": "hass -c ./config",
+      "command": "${command:python.interpreterPath} -m homeassistant -c ./config",
       "group": "test",
       "presentation": {
         "reveal": "always",
CODEOWNERS (10 changes, generated)
@@ -216,6 +216,8 @@ build.json @home-assistant/supervisor
 /tests/components/bmw_connected_drive/ @gerard33 @rikroe
 /homeassistant/components/bond/ @bdraco @prystupa @joshs85 @marciogranzotto
 /tests/components/bond/ @bdraco @prystupa @joshs85 @marciogranzotto
+/homeassistant/components/bosch_alarm/ @mag1024 @sanjay900
+/tests/components/bosch_alarm/ @mag1024 @sanjay900
 /homeassistant/components/bosch_shc/ @tschamm
 /tests/components/bosch_shc/ @tschamm
 /homeassistant/components/braviatv/ @bieniu @Drafteed
@@ -570,8 +572,8 @@ build.json @home-assistant/supervisor
 /tests/components/google_cloud/ @lufton @tronikos
 /homeassistant/components/google_drive/ @tronikos
 /tests/components/google_drive/ @tronikos
-/homeassistant/components/google_generative_ai_conversation/ @tronikos
-/tests/components/google_generative_ai_conversation/ @tronikos
+/homeassistant/components/google_generative_ai_conversation/ @tronikos @ivanlh
+/tests/components/google_generative_ai_conversation/ @tronikos @ivanlh
 /homeassistant/components/google_mail/ @tkdrob
 /tests/components/google_mail/ @tkdrob
 /homeassistant/components/google_photos/ @allenporter
@@ -1183,6 +1185,8 @@ build.json @home-assistant/supervisor
 /tests/components/prusalink/ @balloob
 /homeassistant/components/ps4/ @ktnrg45
 /tests/components/ps4/ @ktnrg45
+/homeassistant/components/pterodactyl/ @elmurato
+/tests/components/pterodactyl/ @elmurato
 /homeassistant/components/pure_energie/ @klaasnicolaas
 /tests/components/pure_energie/ @klaasnicolaas
 /homeassistant/components/purpleair/ @bachya
@@ -1252,6 +1256,8 @@ build.json @home-assistant/supervisor
 /tests/components/refoss/ @ashionky
 /homeassistant/components/remote/ @home-assistant/core
 /tests/components/remote/ @home-assistant/core
+/homeassistant/components/remote_calendar/ @Thomas55555
+/tests/components/remote_calendar/ @Thomas55555
 /homeassistant/components/renault/ @epenet
 /tests/components/renault/ @epenet
 /homeassistant/components/renson/ @jimmyd-be
Dockerfile (4 changes, generated)
@@ -25,13 +25,13 @@ RUN \
     "armv7") go2rtc_suffix='arm' ;; \
     *) go2rtc_suffix=${BUILD_ARCH} ;; \
     esac \
-    && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.8/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
+    && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.9/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
     && chmod +x /bin/go2rtc \
     # Verify go2rtc can be executed
     && go2rtc --version

 # Install uv
-RUN pip3 install uv==0.6.1
+RUN pip3 install uv==0.6.10

 WORKDIR /usr/src
build.yaml
@@ -19,4 +19,4 @@ labels:
   org.opencontainers.image.authors: The Home Assistant Authors
   org.opencontainers.image.url: https://www.home-assistant.io/
   org.opencontainers.image.documentation: https://www.home-assistant.io/docs/
-  org.opencontainers.image.licenses: Apache License 2.0
+  org.opencontainers.image.licenses: Apache-2.0
homeassistant/block_async_io.py
@@ -178,6 +178,15 @@ _BLOCKING_CALLS: tuple[BlockingCall, ...] = (
         strict_core=False,
         skip_for_tests=True,
     ),
+    BlockingCall(
+        original_func=SSLContext.set_default_verify_paths,
+        object=SSLContext,
+        function="set_default_verify_paths",
+        check_allowed=None,
+        strict=False,
+        strict_core=False,
+        skip_for_tests=True,
+    ),
     BlockingCall(
         original_func=Path.open,
         object=Path,
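The new entry above registers SSLContext.set_default_verify_paths as a known blocking call, since it reads CA certificate files from disk. A minimal sketch of the non-blocking pattern, assuming Home Assistant's standard async_add_executor_job helper (the function name below is hypothetical):

import ssl


async def create_verified_context(hass):
    """Build an SSL context without blocking the event loop.

    set_default_verify_paths() performs filesystem I/O, which is why
    block_async_io flags it; running it in the executor keeps the
    event loop responsive.
    """
    context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    # Offload the blocking filesystem read to a worker thread.
    await hass.async_add_executor_job(context.set_default_verify_paths)
    return context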
homeassistant/bootstrap.py
@@ -93,6 +93,7 @@ from .helpers.dispatcher import async_dispatcher_send_internal
 from .helpers.storage import get_internal_store_manager
 from .helpers.system_info import async_get_system_info
 from .helpers.typing import ConfigType
+from .loader import Integration
 from .setup import (
     # _setup_started is marked as protected to make it clear
     # that it is not part of the public API and should not be used
@@ -299,14 +300,6 @@ async def async_setup_hass(
         return hass

-    async def stop_hass(hass: core.HomeAssistant) -> None:
-        """Stop hass."""
-        # Ask integrations to shut down. It's messy but we can't
-        # do a clean stop without knowing what is broken
-        with contextlib.suppress(TimeoutError):
-            async with hass.timeout.async_timeout(10):
-                await hass.async_stop()
-
     hass = await create_hass()

     if runtime_config.skip_pip or runtime_config.skip_pip_packages:
@@ -345,7 +338,7 @@ async def async_setup_hass(
     if config_dict is None:
         recovery_mode = True
-        await stop_hass(hass)
+        await hass.async_stop(force=True)
         hass = await create_hass()

     elif not basic_setup_success:
@@ -353,7 +346,7 @@ async def async_setup_hass(
             "Unable to set up core integrations. Activating recovery mode"
         )
         recovery_mode = True
-        await stop_hass(hass)
+        await hass.async_stop(force=True)
         hass = await create_hass()

     elif any(
@@ -368,7 +361,7 @@ async def async_setup_hass(
         old_logging = hass.data.get(DATA_LOGGING)

         recovery_mode = True
-        await stop_hass(hass)
+        await hass.async_stop(force=True)
         hass = await create_hass()

         if old_logging:
@@ -719,20 +712,25 @@ def _get_domains(hass: core.HomeAssistant, config: dict[str, Any]) -> set[str]:
     return domains


-async def _async_resolve_domains_to_setup(
+async def _async_resolve_domains_and_preload(
     hass: core.HomeAssistant, config: dict[str, Any]
-) -> tuple[set[str], dict[str, loader.Integration]]:
-    """Resolve all dependencies and return list of domains to set up."""
+) -> tuple[dict[str, Integration], dict[str, Integration]]:
+    """Resolve all dependencies and return integrations to set up.
+
+    The return value is a tuple of two dictionaries:
+    - The first dictionary contains integrations
+      specified by the configuration (including config entries).
+    - The second dictionary contains the same integrations as the first dictionary
+      together with all their dependencies.
+    """
     domains_to_setup = _get_domains(hass, config)
-    needed_requirements: set[str] = set()
     platform_integrations = conf_util.extract_platform_integrations(
         config, BASE_PLATFORMS
     )
-    # Ensure base platforms that have platform integrations are added to
-    # to `domains_to_setup so they can be setup first instead of
-    # discovering them when later when a config entry setup task
-    # notices its needed and there is already a long line to use
-    # the import executor.
+    # Ensure base platforms that have platform integrations are added to `domains`,
+    # so they can be setup first instead of discovering them later when a config
+    # entry setup task notices that it's needed and there is already a long line
+    # to use the import executor.
     #
     # For example if we have
     # sensor:
@@ -748,111 +746,78 @@ async def _async_resolve_domains_to_setup(
     # so this will be less of a problem in the future.
     domains_to_setup.update(platform_integrations)

-    # Load manifests for base platforms and platform based integrations
-    # that are defined under base platforms right away since we do not require
-    # the manifest to list them as dependencies and we want to avoid the lock
-    # contention when multiple integrations try to load them at once
-    additional_manifests_to_load = {
+    # Additionally process base platforms since we do not require the manifest
+    # to list them as dependencies.
+    # We want to later avoid lock contention when multiple integrations try to load
+    # their manifests at once.
+    # Also process integrations that are defined under base platforms
+    # to speed things up.
+    additional_domains_to_process = {
         *BASE_PLATFORMS,
         *chain.from_iterable(platform_integrations.values()),
     }

-    translations_to_load = additional_manifests_to_load.copy()
-
-    # Resolve all dependencies so we know all integrations
-    # that will have to be loaded and start right-away
-    integration_cache: dict[str, loader.Integration] = {}
-    to_resolve: set[str] = domains_to_setup
-    while to_resolve or additional_manifests_to_load:
-        old_to_resolve: set[str] = to_resolve
-        to_resolve = set()
+    integrations_or_excs = await loader.async_get_integrations(
+        hass, {*domains_to_setup, *additional_domains_to_process}
+    )
+    # Eliminate those missing or with invalid manifest
+    integrations_to_process = {
+        domain: itg
+        for domain, itg in integrations_or_excs.items()
+        if isinstance(itg, Integration)
+    }
+    integrations_dependencies = await loader.resolve_integrations_dependencies(
+        hass, integrations_to_process.values()
+    )
+    # Eliminate those without valid dependencies
+    integrations_to_process = {
+        domain: integrations_to_process[domain] for domain in integrations_dependencies
+    }

-        if additional_manifests_to_load:
-            to_get = {*old_to_resolve, *additional_manifests_to_load}
-            additional_manifests_to_load.clear()
-        else:
-            to_get = old_to_resolve
+    integrations_to_setup = {
+        domain: itg
+        for domain, itg in integrations_to_process.items()
+        if domain in domains_to_setup
+    }
+    all_integrations_to_setup = integrations_to_setup.copy()
+    all_integrations_to_setup.update(
+        (dep, loader.async_get_loaded_integration(hass, dep))
+        for domain in integrations_to_setup
+        for dep in integrations_dependencies[domain].difference(
+            all_integrations_to_setup
+        )
+    )

-        manifest_deps: set[str] = set()
-        resolve_dependencies_tasks: list[asyncio.Task[bool]] = []
-        integrations_to_process: list[loader.Integration] = []
-
-        for domain, itg in (await loader.async_get_integrations(hass, to_get)).items():
-            if not isinstance(itg, loader.Integration):
-                continue
-            integration_cache[domain] = itg
-            needed_requirements.update(itg.requirements)
-
-            # Make sure manifests for dependencies are loaded in the next
-            # loop to try to group as many as manifest loads in a single
-            # call to avoid the creating one-off executor jobs later in
-            # the setup process
-            additional_manifests_to_load.update(
-                dep
-                for dep in chain(itg.dependencies, itg.after_dependencies)
-                if dep not in integration_cache
-            )
-
-            if domain not in old_to_resolve:
-                continue
-
-            integrations_to_process.append(itg)
-            manifest_deps.update(itg.dependencies)
-            manifest_deps.update(itg.after_dependencies)
-            if not itg.all_dependencies_resolved:
-                resolve_dependencies_tasks.append(
-                    create_eager_task(
-                        itg.resolve_dependencies(),
-                        name=f"resolve dependencies {domain}",
-                        loop=hass.loop,
-                    )
-                )
-
-        if unseen_deps := manifest_deps - integration_cache.keys():
-            # If there are dependencies, try to preload all
-            # the integrations manifest at once and add them
-            # to the list of requirements we need to install
-            # so we can try to check if they are already installed
-            # in a single call below which avoids each integration
-            # having to wait for the lock to do it individually
-            deps = await loader.async_get_integrations(hass, unseen_deps)
-            for dependant_domain, dependant_itg in deps.items():
-                if isinstance(dependant_itg, loader.Integration):
-                    integration_cache[dependant_domain] = dependant_itg
-                    needed_requirements.update(dependant_itg.requirements)
-
-        if resolve_dependencies_tasks:
-            await asyncio.gather(*resolve_dependencies_tasks)
-
-        for itg in integrations_to_process:
-            try:
-                all_deps = itg.all_dependencies
-            except RuntimeError:
-                # Integration.all_dependencies raises RuntimeError if
-                # dependencies could not be resolved
-                continue
-            for dep in all_deps:
-                if dep in domains_to_setup:
-                    continue
-                domains_to_setup.add(dep)
-                to_resolve.add(dep)
-
-    _LOGGER.info("Domains to be set up: %s", domains_to_setup)
+    # Gather requirements for all integrations,
+    # their dependencies and after dependencies.
+    # To gather all the requirements we must ignore exceptions here.
+    # The exceptions will be detected and handled later in the bootstrap process.
+    integrations_after_dependencies = (
+        await loader.resolve_integrations_after_dependencies(
+            hass, integrations_to_process.values(), ignore_exceptions=True
+        )
+    )
+    integrations_requirements = {
+        domain: itg.requirements for domain, itg in integrations_to_process.items()
+    }
+    integrations_requirements.update(
+        (dep, loader.async_get_loaded_integration(hass, dep).requirements)
+        for deps in integrations_after_dependencies.values()
+        for dep in deps.difference(integrations_requirements)
+    )
+    all_requirements = set(chain.from_iterable(integrations_requirements.values()))

     # Optimistically check if requirements are already installed
     # ahead of setting up the integrations so we can prime the cache
-    # We do not wait for this since its an optimization only
+    # We do not wait for this since it's an optimization only
     hass.async_create_background_task(
-        requirements.async_load_installed_versions(hass, needed_requirements),
+        requirements.async_load_installed_versions(hass, all_requirements),
         "check installed requirements",
         eager_start=True,
     )

-    #
-    # Only add the domains_to_setup after we finish resolving
-    # as new domains are likely to added in the process
-    #
-    translations_to_load.update(domains_to_setup)
     # Start loading translations for all integrations we are going to set up
     # in the background so they are ready when we need them. This avoids a
     # lot of waiting for the translation load lock and a thundering herd of
@@ -863,6 +828,7 @@ async def _async_resolve_domains_to_setup(
     # hold the translation load lock and if anything is fast enough to
     # wait for the translation load lock, loading will be done by the
     # time it gets to it.
+    translations_to_load = {*all_integrations_to_setup, *additional_domains_to_process}
     hass.async_create_background_task(
         translation.async_load_integrations(hass, translations_to_load),
         "load translations",
@@ -874,13 +840,13 @@ async def _async_resolve_domains_to_setup(
     # in the setup process.
     hass.async_create_background_task(
         get_internal_store_manager(hass).async_preload(
-            [*PRELOAD_STORAGE, *domains_to_setup]
+            [*PRELOAD_STORAGE, *all_integrations_to_setup]
         ),
         "preload storage",
         eager_start=True,
     )

-    return domains_to_setup, integration_cache
+    return integrations_to_setup, all_integrations_to_setup


 async def _async_set_up_integrations(
@@ -890,69 +856,84 @@ async def _async_set_up_integrations(
     watcher = _WatchPendingSetups(hass, _setup_started(hass))
     watcher.async_start()

-    domains_to_setup, integration_cache = await _async_resolve_domains_to_setup(
+    integrations, all_integrations = await _async_resolve_domains_and_preload(
         hass, config
     )
-    stage_2_domains = domains_to_setup.copy()
+    # Detect all cycles
+    integrations_after_dependencies = (
+        await loader.resolve_integrations_after_dependencies(
+            hass, all_integrations.values(), set(all_integrations)
+        )
+    )
+    all_domains = set(integrations_after_dependencies)
+    domains = set(integrations) & all_domains
+
+    _LOGGER.info(
+        "Domains to be set up: %s | %s",
+        domains,
+        all_domains - domains,
+    )
+
+    async_set_domains_to_be_loaded(hass, all_domains)

     # Initialize recorder
-    if "recorder" in domains_to_setup:
+    if "recorder" in all_domains:
         recorder.async_initialize_recorder(hass)

     # Initialize backup
-    if "backup" in domains_to_setup:
+    if "backup" in all_domains:
         backup.async_initialize_backup(hass)

-    stage_0_and_1_domains: list[tuple[str, set[str], int | None]] = [
+    stages: list[tuple[str, set[str], int | None]] = [
         *(
-            (name, domain_group & domains_to_setup, timeout)
+            (name, domain_group, timeout)
             for name, domain_group, timeout in STAGE_0_INTEGRATIONS
         ),
-        ("stage 1", STAGE_1_INTEGRATIONS & domains_to_setup, STAGE_1_TIMEOUT),
+        ("1", STAGE_1_INTEGRATIONS, STAGE_1_TIMEOUT),
+        ("2", domains, STAGE_2_TIMEOUT),
     ]

-    _LOGGER.info("Setting up stage 0 and 1")
-    for name, domain_group, timeout in stage_0_and_1_domains:
-        if not domain_group:
+    _LOGGER.info("Setting up stage 0")
+    for name, domain_group, timeout in stages:
+        stage_domains_unfiltered = domain_group & all_domains
+        if not stage_domains_unfiltered:
+            _LOGGER.info("Nothing to set up in stage %s: %s", name, domain_group)
             continue

-        _LOGGER.info("Setting up %s: %s", name, domain_group)
-        to_be_loaded = domain_group.copy()
-        to_be_loaded.update(
+        stage_domains = stage_domains_unfiltered - hass.config.components
+        if not stage_domains:
+            _LOGGER.info("Already set up stage %s: %s", name, stage_domains_unfiltered)
+            continue
+
+        stage_dep_domains_unfiltered = {
             dep
-            for domain in domain_group
-            if (integration := integration_cache.get(domain)) is not None
-            for dep in integration.all_dependencies
+            for domain in stage_domains
+            for dep in integrations_after_dependencies[domain]
+            if dep not in stage_domains
         }
-        async_set_domains_to_be_loaded(hass, to_be_loaded)
-        stage_2_domains -= to_be_loaded
+        stage_dep_domains = stage_dep_domains_unfiltered - hass.config.components
+
+        stage_all_domains = stage_domains | stage_dep_domains
+
+        _LOGGER.info(
+            "Setting up stage %s: %s | %s\nDependencies: %s | %s",
+            name,
+            stage_domains,
+            stage_domains_unfiltered - stage_domains,
+            stage_dep_domains,
+            stage_dep_domains_unfiltered - stage_dep_domains,
+        )

         if timeout is None:
-            await _async_setup_multi_components(hass, domain_group, config)
-        else:
-            try:
-                async with hass.timeout.async_timeout(timeout, cool_down=COOLDOWN_TIME):
-                    await _async_setup_multi_components(hass, domain_group, config)
-            except TimeoutError:
-                _LOGGER.warning(
-                    "Setup timed out for %s waiting on %s - moving forward",
-                    name,
-                    hass._active_tasks,  # noqa: SLF001
-                )
-
-    # Add after dependencies when setting up stage 2 domains
-    async_set_domains_to_be_loaded(hass, stage_2_domains)
-
-    if stage_2_domains:
-        _LOGGER.info("Setting up stage 2: %s", stage_2_domains)
-        try:
-            async with hass.timeout.async_timeout(
-                STAGE_2_TIMEOUT, cool_down=COOLDOWN_TIME
-            ):
-                await _async_setup_multi_components(hass, stage_2_domains, config)
-        except TimeoutError:
-            _LOGGER.warning(
-                "Setup timed out for stage 2 waiting on %s - moving forward",
-                hass._active_tasks,  # noqa: SLF001
-            )
+            await _async_setup_multi_components(hass, stage_all_domains, config)
+            continue
+        try:
+            async with hass.timeout.async_timeout(timeout, cool_down=COOLDOWN_TIME):
+                await _async_setup_multi_components(hass, stage_all_domains, config)
+        except TimeoutError:
+            _LOGGER.warning(
+                "Setup timed out for stage %s waiting on %s - moving forward",
+                name,
+                hass._active_tasks,  # noqa: SLF001
+            )
@@ -1054,8 +1035,6 @@ async def _async_setup_multi_components(
     config: dict[str, Any],
 ) -> None:
     """Set up multiple domains. Log on failure."""
-    # Avoid creating tasks for domains that were setup in a previous stage
-    domains_not_yet_setup = domains - hass.config.components
     # Create setup tasks for base platforms first since everything will have
     # to wait to be imported, and the sooner we can get the base platforms
     # loaded the sooner we can start loading the rest of the integrations.
@@ -1065,9 +1044,7 @@ async def _async_setup_multi_components(
             f"setup component {domain}",
             eager_start=True,
         )
-        for domain in sorted(
-            domains_not_yet_setup, key=SETUP_ORDER_SORT_KEY, reverse=True
-        )
+        for domain in sorted(domains, key=SETUP_ORDER_SORT_KEY, reverse=True)
     }
     results = await asyncio.gather(*futures.values(), return_exceptions=True)
     for idx, domain in enumerate(futures):
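A toy illustration of the per-stage filtering introduced above; the domain names and sets here are invented for the example, not taken from the real STAGE_* constants:

# Dependency map shaped like resolve_integrations_after_dependencies output.
integrations_after_dependencies = {
    "recorder": {"http", "frontend"},
    "light": {"http"},
    "http": set(),
    "frontend": {"http"},
}
all_domains = set(integrations_after_dependencies)
already_set_up = {"http"}  # stands in for hass.config.components

stage_domains_unfiltered = {"recorder", "light"} & all_domains
stage_domains = stage_domains_unfiltered - already_set_up
stage_dep_domains = {
    dep
    for domain in stage_domains
    for dep in integrations_after_dependencies[domain]
    if dep not in stage_domains
} - already_set_up

# recorder and light are set up together with their not-yet-loaded
# dependency frontend; http is skipped because it is already set up.
assert stage_domains == {"recorder", "light"}
assert stage_dep_domains == {"frontend"}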
homeassistant/brands/motionblinds.json
@@ -1,5 +1,6 @@
 {
   "domain": "motionblinds",
   "name": "Motionblinds",
-  "integrations": ["motion_blinds", "motionblinds_ble"]
+  "integrations": ["motion_blinds", "motionblinds_ble"],
+  "iot_standards": ["matter"]
 }
homeassistant/components/accuweather/coordinator.py
@@ -75,7 +75,11 @@ class AccuWeatherObservationDataUpdateCoordinator(
             async with timeout(10):
                 result = await self.accuweather.async_get_current_conditions()
         except EXCEPTIONS as error:
-            raise UpdateFailed(error) from error
+            raise UpdateFailed(
+                translation_domain=DOMAIN,
+                translation_key="current_conditions_update_error",
+                translation_placeholders={"error": repr(error)},
+            ) from error

         _LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining)
@@ -121,7 +125,11 @@ class AccuWeatherDailyForecastDataUpdateCoordinator(
                     language=self.hass.config.language
                 )
         except EXCEPTIONS as error:
-            raise UpdateFailed(error) from error
+            raise UpdateFailed(
+                translation_domain=DOMAIN,
+                translation_key="forecast_update_error",
+                translation_placeholders={"error": repr(error)},
+            ) from error

         _LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining)
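The hunks above switch AccuWeather from passing the raw exception into UpdateFailed over to translated error messages. A minimal sketch of the pattern for any coordinator, assuming a hypothetical "example" domain and client; translation_key must match an entry under "exceptions" in the integration's strings.json:

from homeassistant.helpers.update_coordinator import UpdateFailed

DOMAIN = "example"  # hypothetical domain


async def _async_update_data(self):
    """Fetch data, raising a translatable UpdateFailed on errors."""
    try:
        return await self.client.fetch()  # hypothetical client call
    except TimeoutError as error:
        # The placeholder values are substituted into the message
        # defined in strings.json under exceptions.update_error.
        raise UpdateFailed(
            translation_domain=DOMAIN,
            translation_key="update_error",
            translation_placeholders={"error": repr(error)},
        ) from error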
homeassistant/components/accuweather/strings.json
@@ -106,6 +106,15 @@
           "steady": "Steady",
           "rising": "Rising",
           "falling": "Falling"
-        }
+        },
+        "state_attributes": {
+          "options": {
+            "state": {
+              "falling": "[%key:component::accuweather::entity::sensor::pressure_tendency::state::falling%]",
+              "rising": "[%key:component::accuweather::entity::sensor::pressure_tendency::state::rising%]",
+              "steady": "[%key:component::accuweather::entity::sensor::pressure_tendency::state::steady%]"
+            }
+          }
+        }
       },
       "ragweed_pollen": {
@@ -220,6 +229,14 @@
         }
       }
     },
+    "exceptions": {
+      "current_conditions_update_error": {
+        "message": "An error occurred while retrieving weather current conditions data from the AccuWeather API: {error}"
+      },
+      "forecast_update_error": {
+        "message": "An error occurred while retrieving weather forecast data from the AccuWeather API: {error}"
+      }
+    },
     "system_health": {
       "info": {
         "can_reach_server": "Reach AccuWeather server",
homeassistant/components/adax/strings.json
@@ -5,14 +5,14 @@
       "data": {
         "connection_type": "Select connection type"
       },
-      "description": "Select connection type. Local requires heaters with bluetooth"
+      "description": "Select connection type. Local requires heaters with Bluetooth"
     },
     "local": {
       "data": {
         "wifi_ssid": "Wi-Fi SSID",
-        "wifi_pswd": "Wi-Fi Password"
+        "wifi_pswd": "Wi-Fi password"
       },
-      "description": "Reset the heater by pressing + and OK until display shows 'Reset'. Then press and hold OK button on the heater until the blue led starts blinking before pressing Submit. Configuring heater might take some minutes."
+      "description": "Reset the heater by pressing + and OK until display shows 'Reset'. Then press and hold OK button on the heater until the blue LED starts blinking before pressing Submit. Configuring heater might take some minutes."
     },
     "cloud": {
       "data": {
@@ -11,7 +11,7 @@
       }
     },
     "discovery_confirm": {
-      "description": "Do you want to setup {model}?"
+      "description": "Do you want to set up {model}?"
     }
   },
   "abort": {
homeassistant/components/airly/coordinator.py
@@ -105,7 +105,14 @@ class AirlyDataUpdateCoordinator(DataUpdateCoordinator[dict[str, str | float | i
         try:
             await measurements.update()
         except (AirlyError, ClientConnectorError) as error:
-            raise UpdateFailed(error) from error
+            raise UpdateFailed(
+                translation_domain=DOMAIN,
+                translation_key="update_error",
+                translation_placeholders={
+                    "entry": self.config_entry.title,
+                    "error": repr(error),
+                },
+            ) from error

         _LOGGER.debug(
             "Requests remaining: %s/%s",
@@ -126,7 +133,11 @@ class AirlyDataUpdateCoordinator(DataUpdateCoordinator[dict[str, str | float | i
             standards = measurements.current["standards"]

         if index["description"] == NO_AIRLY_SENSORS:
-            raise UpdateFailed("Can't retrieve data: no Airly sensors in this area")
+            raise UpdateFailed(
+                translation_domain=DOMAIN,
+                translation_key="no_station",
+                translation_placeholders={"entry": self.config_entry.title},
+            )
         for value in values:
             data[value["name"]] = value["value"]
         for standard in standards:
homeassistant/components/airly/strings.json
@@ -36,5 +36,13 @@
         "name": "[%key:component::sensor::entity_component::carbon_monoxide::name%]"
       }
     }
-  }
+  },
+  "exceptions": {
+    "update_error": {
+      "message": "An error occurred while retrieving data from the Airly API for {entry}: {error}"
+    },
+    "no_station": {
+      "message": "An error occurred while retrieving data from the Airly API for {entry}: no measuring stations in this area"
+    }
+  }
 }
homeassistant/components/airnow/strings.json
@@ -7,7 +7,7 @@
         "api_key": "[%key:common::config_flow::data::api_key%]",
         "latitude": "[%key:common::config_flow::data::latitude%]",
         "longitude": "[%key:common::config_flow::data::longitude%]",
-        "radius": "Station Radius (miles; optional)"
+        "radius": "Station radius (miles; optional)"
       }
     }
   },
@@ -25,7 +25,7 @@
     "step": {
       "init": {
         "data": {
-          "radius": "Station Radius (miles)"
+          "radius": "Station radius (miles)"
         }
       }
     }
homeassistant/components/airq/strings.json
@@ -91,7 +91,7 @@
       "name": "Hydrogen fluoride"
     },
     "health_index": {
-      "name": "Health Index"
+      "name": "Health index"
     },
     "absolute_humidity": {
       "name": "Absolute humidity"
@@ -112,10 +112,10 @@
       "name": "Oxygen"
     },
     "performance_index": {
-      "name": "Performance Index"
+      "name": "Performance index"
     },
     "hydrogen_phosphide": {
-      "name": "Hydrogen Phosphide"
+      "name": "Hydrogen phosphide"
     },
     "relative_pressure": {
       "name": "Relative pressure"
@@ -127,22 +127,22 @@
       "name": "Refrigerant"
     },
     "silicon_hydride": {
-      "name": "Silicon Hydride"
+      "name": "Silicon hydride"
     },
     "noise": {
       "name": "Noise"
     },
     "maximum_noise": {
-      "name": "Noise (Maximum)"
+      "name": "Noise (maximum)"
     },
     "radon": {
       "name": "Radon"
     },
     "industrial_volatile_organic_compounds": {
-      "name": "VOCs (Industrial)"
+      "name": "VOCs (industrial)"
     },
     "virus_index": {
-      "name": "Virus Index"
+      "name": "Virus index"
     }
   }
 }
homeassistant/components/airthings_ble/config_flow.py
@@ -102,7 +102,8 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
             device = await self._get_device_data(discovery_info)
         except AirthingsDeviceUpdateError:
             return self.async_abort(reason="cannot_connect")
-        except Exception:  # noqa: BLE001
+        except Exception:
             _LOGGER.exception("Unknown error occurred")
             return self.async_abort(reason="unknown")

         name = get_name(device)
@@ -160,7 +161,8 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
             device = await self._get_device_data(discovery_info)
         except AirthingsDeviceUpdateError:
             return self.async_abort(reason="cannot_connect")
-        except Exception:  # noqa: BLE001
+        except Exception:
             _LOGGER.exception("Unknown error occurred")
             return self.async_abort(reason="unknown")
         name = get_name(device)
         self._discovered_devices[address] = Discovery(name, discovery_info, device)
@ -32,7 +32,8 @@ class AirTouch5ConfigFlow(ConfigFlow, domain=DOMAIN):
client = Airtouch5SimpleClient(user_input[CONF_HOST])
try:
await client.test_connection()
except Exception: # noqa: BLE001
except Exception:
_LOGGER.exception("Unexpected exception")
errors = {"base": "cannot_connect"}
else:
await self.async_set_unique_id(user_input[CONF_HOST])

@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/airzone_cloud",
"iot_class": "cloud_push",
"loggers": ["aioairzone_cloud"],
"requirements": ["aioairzone-cloud==0.6.10"]
"requirements": ["aioairzone-cloud==0.6.11"]
}

@ -1438,7 +1438,7 @@ class AlexaModeController(AlexaCapability):
# Fan preset_mode
if self.instance == f"{fan.DOMAIN}.{fan.ATTR_PRESET_MODE}":
mode = self.entity.attributes.get(fan.ATTR_PRESET_MODE, None)
if mode in self.entity.attributes.get(fan.ATTR_PRESET_MODES, None):
if mode in self.entity.attributes.get(fan.ATTR_PRESET_MODES, ()):
return f"{fan.ATTR_PRESET_MODE}.{mode}"

# Humidifier mode

@ -240,6 +240,7 @@ SENSOR_DESCRIPTIONS = (
suggested_display_precision=0,
entity_registry_enabled_default=False,
device_class=SensorDeviceClass.WIND_DIRECTION,
state_class=SensorStateClass.MEASUREMENT_ANGLE,
),
SensorEntityDescription(
key=TYPE_WINDGUSTMPH,

@ -609,6 +609,7 @@ SENSOR_DESCRIPTIONS = (
translation_key="wind_direction",
native_unit_of_measurement=DEGREE,
device_class=SensorDeviceClass.WIND_DIRECTION,
state_class=SensorStateClass.MEASUREMENT_ANGLE,
),
SensorEntityDescription(
key=TYPE_WINDDIR_AVG10M,

@ -7,6 +7,6 @@
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["androidtvremote2"],
"requirements": ["androidtvremote2==0.2.0"],
"requirements": ["androidtvremote2==0.2.1"],
"zeroconf": ["_androidtvremote2._tcp.local."]
}

@ -2,6 +2,8 @@

from __future__ import annotations

import logging

from anova_wifi import AnovaApi, InvalidLogin
import voluptuous as vol

@ -11,8 +13,10 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

class AnovaConfligFlow(ConfigFlow, domain=DOMAIN):

class AnovaConfigFlow(ConfigFlow, domain=DOMAIN):
"""Sets up a config flow for Anova."""

VERSION = 1
@ -35,7 +39,8 @@ class AnovaConfligFlow(ConfigFlow, domain=DOMAIN):
await api.authenticate()
except InvalidLogin:
errors["base"] = "invalid_auth"
except Exception: # noqa: BLE001
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self.async_create_entry(

@ -22,6 +22,7 @@ from . import AnthemavConfigEntry
from .const import ANTHEMAV_UPDATE_SIGNAL, DOMAIN, MANUFACTURER

_LOGGER = logging.getLogger(__name__)
VOLUME_STEP = 0.01


async def async_setup_entry(
@ -60,6 +61,7 @@ class AnthemAVR(MediaPlayerEntity):
| MediaPlayerEntityFeature.TURN_OFF
| MediaPlayerEntityFeature.SELECT_SOURCE
)
_attr_volume_step = VOLUME_STEP

def __init__(
self,

@ -34,10 +34,12 @@ from .const import (
CONF_PROMPT,
CONF_RECOMMENDED,
CONF_TEMPERATURE,
CONF_THINKING_BUDGET,
DOMAIN,
RECOMMENDED_CHAT_MODEL,
RECOMMENDED_MAX_TOKENS,
RECOMMENDED_TEMPERATURE,
RECOMMENDED_THINKING_BUDGET,
)

_LOGGER = logging.getLogger(__name__)
@ -128,21 +130,29 @@ class AnthropicOptionsFlow(OptionsFlow):
) -> ConfigFlowResult:
"""Manage the options."""
options: dict[str, Any] | MappingProxyType[str, Any] = self.config_entry.options
errors: dict[str, str] = {}

if user_input is not None:
if user_input[CONF_RECOMMENDED] == self.last_rendered_recommended:
if user_input[CONF_LLM_HASS_API] == "none":
user_input.pop(CONF_LLM_HASS_API)
return self.async_create_entry(title="", data=user_input)

# Re-render the options again, now with the recommended options shown/hidden
self.last_rendered_recommended = user_input[CONF_RECOMMENDED]
if user_input.get(
CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET
) >= user_input.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS):
errors[CONF_THINKING_BUDGET] = "thinking_budget_too_large"

options = {
CONF_RECOMMENDED: user_input[CONF_RECOMMENDED],
CONF_PROMPT: user_input[CONF_PROMPT],
CONF_LLM_HASS_API: user_input[CONF_LLM_HASS_API],
}
if not errors:
return self.async_create_entry(title="", data=user_input)
else:
# Re-render the options again, now with the recommended options shown/hidden
self.last_rendered_recommended = user_input[CONF_RECOMMENDED]

options = {
CONF_RECOMMENDED: user_input[CONF_RECOMMENDED],
CONF_PROMPT: user_input[CONF_PROMPT],
CONF_LLM_HASS_API: user_input[CONF_LLM_HASS_API],
}

suggested_values = options.copy()
if not suggested_values.get(CONF_PROMPT):
@ -156,6 +166,7 @@ class AnthropicOptionsFlow(OptionsFlow):
return self.async_show_form(
step_id="init",
data_schema=schema,
errors=errors or None,
)


@ -205,6 +216,10 @@ def anthropic_config_option_schema(
CONF_TEMPERATURE,
default=RECOMMENDED_TEMPERATURE,
): NumberSelector(NumberSelectorConfig(min=0, max=1, step=0.05)),
vol.Optional(
CONF_THINKING_BUDGET,
default=RECOMMENDED_THINKING_BUDGET,
): int,
}
)
return schema

@ -13,3 +13,8 @@ CONF_MAX_TOKENS = "max_tokens"
RECOMMENDED_MAX_TOKENS = 1024
CONF_TEMPERATURE = "temperature"
RECOMMENDED_TEMPERATURE = 1.0
CONF_THINKING_BUDGET = "thinking_budget"
RECOMMENDED_THINKING_BUDGET = 0
MIN_THINKING_BUDGET = 1024

THINKING_MODELS = ["claude-3-7-sonnet-20250219", "claude-3-7-sonnet-latest"]

@ -1,23 +1,32 @@
"""Conversation support for Anthropic."""

from collections.abc import AsyncGenerator, Callable
from collections.abc import AsyncGenerator, Callable, Iterable
import json
from typing import Any, Literal
from typing import Any, Literal, cast

import anthropic
from anthropic import AsyncStream
from anthropic._types import NOT_GIVEN
from anthropic.types import (
InputJSONDelta,
Message,
MessageParam,
MessageStreamEvent,
RawContentBlockDeltaEvent,
RawContentBlockStartEvent,
RawContentBlockStopEvent,
RawMessageStartEvent,
RawMessageStopEvent,
RedactedThinkingBlock,
RedactedThinkingBlockParam,
SignatureDelta,
TextBlock,
TextBlockParam,
TextDelta,
ThinkingBlock,
ThinkingBlockParam,
ThinkingConfigDisabledParam,
ThinkingConfigEnabledParam,
ThinkingDelta,
ToolParam,
ToolResultBlockParam,
ToolUseBlock,
@ -30,7 +39,7 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import chat_session, device_registry as dr, intent, llm
from homeassistant.helpers import device_registry as dr, intent, llm
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import AnthropicConfigEntry
@ -39,11 +48,15 @@ from .const import (
CONF_MAX_TOKENS,
CONF_PROMPT,
CONF_TEMPERATURE,
CONF_THINKING_BUDGET,
DOMAIN,
LOGGER,
MIN_THINKING_BUDGET,
RECOMMENDED_CHAT_MODEL,
RECOMMENDED_MAX_TOKENS,
RECOMMENDED_TEMPERATURE,
RECOMMENDED_THINKING_BUDGET,
THINKING_MODELS,
)

# Max number of back and forth with the LLM to generate a response
@ -71,73 +84,101 @@ def _format_tool(
)


def _message_convert(
message: Message,
) -> MessageParam:
"""Convert from class to TypedDict."""
param_content: list[TextBlockParam | ToolUseBlockParam] = []
def _convert_content(
chat_content: Iterable[conversation.Content],
) -> list[MessageParam]:
"""Transform HA chat_log content into Anthropic API format."""
messages: list[MessageParam] = []

for message_content in message.content:
if isinstance(message_content, TextBlock):
param_content.append(TextBlockParam(type="text", text=message_content.text))
elif isinstance(message_content, ToolUseBlock):
param_content.append(
ToolUseBlockParam(
type="tool_use",
id=message_content.id,
name=message_content.name,
input=message_content.input,
)
for content in chat_content:
if isinstance(content, conversation.ToolResultContent):
tool_result_block = ToolResultBlockParam(
type="tool_result",
tool_use_id=content.tool_call_id,
content=json.dumps(content.tool_result),
)

return MessageParam(role=message.role, content=param_content)


def _convert_content(chat_content: conversation.Content) -> MessageParam:
"""Create tool response content."""
if isinstance(chat_content, conversation.ToolResultContent):
return MessageParam(
role="user",
content=[
ToolResultBlockParam(
type="tool_result",
tool_use_id=chat_content.tool_call_id,
content=json.dumps(chat_content.tool_result),
)
],
)
if isinstance(chat_content, conversation.AssistantContent):
return MessageParam(
role="assistant",
content=[
TextBlockParam(type="text", text=chat_content.content or ""),
*[
ToolUseBlockParam(
type="tool_use",
id=tool_call.id,
name=tool_call.tool_name,
input=tool_call.tool_args,
if not messages or messages[-1]["role"] != "user":
messages.append(
MessageParam(
role="user",
content=[tool_result_block],
)
for tool_call in chat_content.tool_calls or ()
],
],
)
if isinstance(chat_content, conversation.UserContent):
return MessageParam(
role="user",
content=chat_content.content,
)
# Note: We don't pass SystemContent here as its passed to the API as the prompt
raise ValueError(f"Unexpected content type: {type(chat_content)}")
)
elif isinstance(messages[-1]["content"], str):
messages[-1]["content"] = [
TextBlockParam(type="text", text=messages[-1]["content"]),
tool_result_block,
]
else:
messages[-1]["content"].append(tool_result_block) # type: ignore[attr-defined]
elif isinstance(content, conversation.UserContent):
# Combine consequent user messages
if not messages or messages[-1]["role"] != "user":
messages.append(
MessageParam(
role="user",
content=content.content,
)
)
elif isinstance(messages[-1]["content"], str):
messages[-1]["content"] = [
TextBlockParam(type="text", text=messages[-1]["content"]),
TextBlockParam(type="text", text=content.content),
]
else:
messages[-1]["content"].append( # type: ignore[attr-defined]
TextBlockParam(type="text", text=content.content)
)
elif isinstance(content, conversation.AssistantContent):
# Combine consequent assistant messages
if not messages or messages[-1]["role"] != "assistant":
messages.append(
MessageParam(
role="assistant",
content=[],
)
)

if content.content:
messages[-1]["content"].append( # type: ignore[union-attr]
TextBlockParam(type="text", text=content.content)
)
if content.tool_calls:
messages[-1]["content"].extend( # type: ignore[union-attr]
[
ToolUseBlockParam(
type="tool_use",
id=tool_call.id,
name=tool_call.tool_name,
input=tool_call.tool_args,
)
for tool_call in content.tool_calls
]
)
else:
# Note: We don't pass SystemContent here as its passed to the API as the prompt
raise TypeError(f"Unexpected content type: {type(content)}")

return messages


async def _transform_stream(
result: AsyncStream[MessageStreamEvent],
messages: list[MessageParam],
) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
"""Transform the response stream into HA format.

A typical stream of responses might look something like the following:
- RawMessageStartEvent with no content
- RawContentBlockStartEvent with an empty ThinkingBlock (if extended thinking is enabled)
- RawContentBlockDeltaEvent with a ThinkingDelta
- RawContentBlockDeltaEvent with a ThinkingDelta
- RawContentBlockDeltaEvent with a ThinkingDelta
- ...
- RawContentBlockDeltaEvent with a SignatureDelta
- RawContentBlockStopEvent
- RawContentBlockStartEvent with a RedactedThinkingBlock (occasionally)
- RawContentBlockStopEvent (RedactedThinkingBlock does not have a delta)
- RawContentBlockStartEvent with an empty TextBlock
- RawContentBlockDeltaEvent with a TextDelta
- RawContentBlockDeltaEvent with a TextDelta
@ -151,44 +192,103 @@ async def _transform_stream(
- RawContentBlockStopEvent
- RawMessageDeltaEvent with a stop_reason='tool_use'
- RawMessageStopEvent(type='message_stop')

Each message could contain multiple blocks of the same type.
"""
if result is None:
raise TypeError("Expected a stream of messages")

current_tool_call: dict | None = None
current_message: MessageParam | None = None
current_block: (
TextBlockParam
| ToolUseBlockParam
| ThinkingBlockParam
| RedactedThinkingBlockParam
| None
) = None
current_tool_args: str

async for response in result:
LOGGER.debug("Received response: %s", response)

if isinstance(response, RawContentBlockStartEvent):
if isinstance(response, RawMessageStartEvent):
if response.message.role != "assistant":
raise ValueError("Unexpected message role")
current_message = MessageParam(role=response.message.role, content=[])
elif isinstance(response, RawContentBlockStartEvent):
if isinstance(response.content_block, ToolUseBlock):
current_tool_call = {
"id": response.content_block.id,
"name": response.content_block.name,
"input": "",
}
current_block = ToolUseBlockParam(
type="tool_use",
id=response.content_block.id,
name=response.content_block.name,
input="",
)
current_tool_args = ""
elif isinstance(response.content_block, TextBlock):
current_block = TextBlockParam(
type="text", text=response.content_block.text
)
yield {"role": "assistant"}
if response.content_block.text:
yield {"content": response.content_block.text}
elif isinstance(response.content_block, ThinkingBlock):
current_block = ThinkingBlockParam(
type="thinking",
thinking=response.content_block.thinking,
signature=response.content_block.signature,
)
elif isinstance(response.content_block, RedactedThinkingBlock):
current_block = RedactedThinkingBlockParam(
type="redacted_thinking", data=response.content_block.data
)
LOGGER.debug(
"Some of Claude’s internal reasoning has been automatically "
"encrypted for safety reasons. This doesn’t affect the quality of "
"responses"
)
elif isinstance(response, RawContentBlockDeltaEvent):
if current_block is None:
raise ValueError("Unexpected delta without a block")
if isinstance(response.delta, InputJSONDelta):
if current_tool_call is None:
raise ValueError("Unexpected delta without a tool call")
current_tool_call["input"] += response.delta.partial_json
current_tool_args += response.delta.partial_json
elif isinstance(response.delta, TextDelta):
LOGGER.debug("yielding delta: %s", response.delta.text)
text_block = cast(TextBlockParam, current_block)
text_block["text"] += response.delta.text
yield {"content": response.delta.text}
elif isinstance(response.delta, ThinkingDelta):
thinking_block = cast(ThinkingBlockParam, current_block)
thinking_block["thinking"] += response.delta.thinking
elif isinstance(response.delta, SignatureDelta):
thinking_block = cast(ThinkingBlockParam, current_block)
thinking_block["signature"] += response.delta.signature
elif isinstance(response, RawContentBlockStopEvent):
if current_tool_call:
if current_block is None:
raise ValueError("Unexpected stop event without a current block")
if current_block["type"] == "tool_use":
tool_block = cast(ToolUseBlockParam, current_block)
tool_args = json.loads(current_tool_args)
tool_block["input"] = tool_args
yield {
"tool_calls": [
llm.ToolInput(
id=current_tool_call["id"],
tool_name=current_tool_call["name"],
tool_args=json.loads(current_tool_call["input"]),
id=tool_block["id"],
tool_name=tool_block["name"],
tool_args=tool_args,
)
]
}
current_tool_call = None
elif current_block["type"] == "thinking":
thinking_block = cast(ThinkingBlockParam, current_block)
LOGGER.debug("Thinking: %s", thinking_block["thinking"])

if current_message is None:
raise ValueError("Unexpected stop event without a current message")
current_message["content"].append(current_block) # type: ignore[union-attr]
current_block = None
elif isinstance(response, RawMessageStopEvent):
if current_message is not None:
messages.append(current_message)
current_message = None


class AnthropicConversationEntity(
@ -226,18 +326,6 @@ class AnthropicConversationEntity(
self.entry.add_update_listener(self._async_entry_update_listener)
)

async def async_process(
self, user_input: conversation.ConversationInput
) -> conversation.ConversationResult:
"""Process a sentence."""
with (
chat_session.async_get_chat_session(
self.hass, user_input.conversation_id
) as session,
conversation.async_get_chat_log(self.hass, session, user_input) as chat_log,
):
return await self._async_handle_message(user_input, chat_log)

async def _async_handle_message(
self,
user_input: conversation.ConversationInput,
@ -266,34 +354,50 @@ class AnthropicConversationEntity(
system = chat_log.content[0]
if not isinstance(system, conversation.SystemContent):
raise TypeError("First message must be a system message")
messages = [_convert_content(content) for content in chat_log.content[1:]]
messages = _convert_content(chat_log.content[1:])

client = self.entry.runtime_data

thinking_budget = options.get(CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET)
model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)

# To prevent infinite loops, we limit the number of iterations
for _iteration in range(MAX_TOOL_ITERATIONS):
try:
stream = await client.messages.create(
model=options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL),
messages=messages,
tools=tools or NOT_GIVEN,
max_tokens=options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
system=system.content,
temperature=options.get(CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE),
stream=True,
model_args = {
"model": model,
"messages": messages,
"tools": tools or NOT_GIVEN,
"max_tokens": options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
"system": system.content,
"stream": True,
}
if model in THINKING_MODELS and thinking_budget >= MIN_THINKING_BUDGET:
model_args["thinking"] = ThinkingConfigEnabledParam(
type="enabled", budget_tokens=thinking_budget
)
else:
model_args["thinking"] = ThinkingConfigDisabledParam(type="disabled")
model_args["temperature"] = options.get(
CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
)

try:
stream = await client.messages.create(**model_args)
except anthropic.AnthropicError as err:
raise HomeAssistantError(
f"Sorry, I had a problem talking to Anthropic: {err}"
) from err

messages.extend(
[
_convert_content(content)
async for content in chat_log.async_add_delta_content_stream(
user_input.agent_id, _transform_stream(stream)
)
]
_convert_content(
[
content
async for content in chat_log.async_add_delta_content_stream(
user_input.agent_id, _transform_stream(stream, messages)
)
if not isinstance(content, conversation.AssistantContent)
]
)
)

if not chat_log.unresponded_tool_results:

@ -23,12 +23,17 @@
"max_tokens": "Maximum tokens to return in response",
"temperature": "Temperature",
"llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]",
"recommended": "Recommended model settings"
"recommended": "Recommended model settings",
"thinking_budget_tokens": "Thinking budget"
},
"data_description": {
"prompt": "Instruct how the LLM should respond. This can be a template."
"prompt": "Instruct how the LLM should respond. This can be a template.",
"thinking_budget_tokens": "The number of tokens the model can use to think about the response out of the total maximum number of tokens. Set to 1024 or greater to enable extended thinking."
}
}
},
"error": {
"thinking_budget_too_large": "Maximum tokens must be greater than the thinking budget."
}
}
}

@ -57,7 +57,7 @@
"name": "Status date"
},
"dip_switch_settings": {
"name": "Dip switch settings"
"name": "DIP switch settings"
},
"low_battery_signal": {
"name": "Low battery signal"

@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["pyaprilaire"],
"requirements": ["pyaprilaire==0.7.7"]
"requirements": ["pyaprilaire==0.8.1"]
}

@ -60,7 +60,7 @@ class AquaCellConfigFlow(ConfigFlow, domain=DOMAIN):
errors["base"] = "cannot_connect"
except AuthenticationFailed:
errors["base"] = "invalid_auth"
except Exception: # pylint: disable=broad-except
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:

@ -6,7 +6,11 @@ import logging
from typing import Any

from homeassistant.components import mqtt
from homeassistant.components.sensor import SensorDeviceClass, SensorEntity
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorStateClass,
)
from homeassistant.const import DEGREE, UnitOfPrecipitationDepth, UnitOfTemperature
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback
@ -98,6 +102,7 @@ def discover_sensors(topic: str, payload: dict[str, Any]) -> list[ArwnSensor] |
DEGREE,
"mdi:compass",
device_class=SensorDeviceClass.WIND_DIRECTION,
state_class=SensorStateClass.MEASUREMENT_ANGLE,
),
]
return None
@ -178,6 +183,7 @@ class ArwnSensor(SensorEntity):
units: str,
icon: str | None = None,
device_class: SensorDeviceClass | None = None,
state_class: SensorStateClass | None = None,
) -> None:
"""Initialize the sensor."""
self.entity_id = _slug(name)
@ -188,6 +194,7 @@ class ArwnSensor(SensorEntity):
self._attr_native_unit_of_measurement = units
self._attr_icon = icon
self._attr_device_class = device_class
self._attr_state_class = state_class

def set_event(self, event: dict[str, Any]) -> None:
"""Update the sensor with the most recent event."""

@ -125,7 +125,7 @@ SAVE_DELAY = 10
@callback
def _async_local_fallback_intent_filter(result: RecognizeResult) -> bool:
"""Filter out intents that are not local fallback."""
return result.intent.name in (intent.INTENT_GET_STATE, intent.INTENT_NEVERMIND)
return result.intent.name in (intent.INTENT_GET_STATE)


@callback
@ -649,6 +649,7 @@ class PipelineRun:
data["runner_data"] = self.runner_data
if self.tts_stream:
data["tts_output"] = {
"token": self.tts_stream.token,
"url": self.tts_stream.url,
"mime_type": self.tts_stream.content_type,
}
@ -1295,6 +1296,7 @@ class PipelineRun:

tts_output = {
"media_id": tts_media_id,
"token": self.tts_stream.token,
"url": self.tts_stream.url,
"mime_type": self.tts_stream.content_type,
}

@ -56,6 +56,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
{
vol.Optional("message"): str,
vol.Optional("media_id"): str,
vol.Optional("preannounce_media_id"): str,
}
),
cv.has_at_least_one_key("message", "media_id"),
@ -70,6 +71,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
{
vol.Optional("start_message"): str,
vol.Optional("start_media_id"): str,
vol.Optional("preannounce_media_id"): str,
vol.Optional("extra_system_prompt"): str,
}
),

@ -23,9 +23,6 @@ from homeassistant.components.assist_pipeline import (
vad,
)
from homeassistant.components.media_player import async_process_play_media_url
from homeassistant.components.tts import (
generate_media_source_id as tts_generate_media_source_id,
)
from homeassistant.core import Context, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import chat_session, entity
@ -98,9 +95,15 @@ class AssistSatelliteAnnouncement:
original_media_id: str
"""The raw media ID before processing."""

tts_token: str | None
"""The TTS token of the media."""

media_id_source: Literal["url", "media_id", "tts"]
"""Source of the media ID."""

preannounce_media_id: str | None = None
"""Media ID to be played before announcement."""


class AssistSatelliteEntity(entity.Entity):
"""Entity encapsulating the state and functionality of an Assist satellite."""
@ -177,6 +180,7 @@ class AssistSatelliteEntity(entity.Entity):
self,
message: str | None = None,
media_id: str | None = None,
preannounce_media_id: str | None = None,
) -> None:
"""Play and show an announcement on the satellite.

@ -186,6 +190,8 @@ class AssistSatelliteEntity(entity.Entity):
If media_id is provided, it is played directly. It is possible
to omit the message and the satellite will not show any text.

If preannounce_media_id is provided, it is played before the announcement.

Calls async_announce with message and media id.
"""
await self._cancel_running_pipeline()
@ -193,7 +199,9 @@ class AssistSatelliteEntity(entity.Entity):
if message is None:
message = ""

announcement = await self._resolve_announcement_media_id(message, media_id)
announcement = await self._resolve_announcement_media_id(
message, media_id, preannounce_media_id
)

if self._is_announcing:
raise SatelliteBusyError
@ -220,6 +228,7 @@ class AssistSatelliteEntity(entity.Entity):
start_message: str | None = None,
start_media_id: str | None = None,
extra_system_prompt: str | None = None,
preannounce_media_id: str | None = None,
) -> None:
"""Start a conversation from the satellite.

@ -229,6 +238,8 @@ class AssistSatelliteEntity(entity.Entity):
If start_media_id is provided, it is played directly. It is possible
to omit the message and the satellite will not show any text.

If preannounce_media_id is provided, it is played before the announcement.

Calls async_start_conversation.
"""
await self._cancel_running_pipeline()
@ -244,13 +255,15 @@ class AssistSatelliteEntity(entity.Entity):
start_message = ""

announcement = await self._resolve_announcement_media_id(
start_message, start_media_id
start_message, start_media_id, preannounce_media_id
)

if self._is_announcing:
raise SatelliteBusyError

self._is_announcing = True
self._set_state(AssistSatelliteState.RESPONDING)

# Provide our start info to the LLM so it understands context of incoming message
if extra_system_prompt is not None:
self._extra_system_prompt = extra_system_prompt
@ -280,6 +293,7 @@ class AssistSatelliteEntity(entity.Entity):
raise
finally:
self._is_announcing = False
self._set_state(AssistSatelliteState.IDLE)

async def async_start_conversation(
self, start_announcement: AssistSatelliteAnnouncement
@ -470,20 +484,27 @@ class AssistSatelliteEntity(entity.Entity):
return vad.VadSensitivity.to_seconds(vad_sensitivity)

async def _resolve_announcement_media_id(
self, message: str, media_id: str | None
self,
message: str,
media_id: str | None,
preannounce_media_id: str | None = None,
) -> AssistSatelliteAnnouncement:
"""Resolve the media ID."""
media_id_source: Literal["url", "media_id", "tts"] | None = None
tts_token: str | None = None

if media_id:
original_media_id = media_id

else:
media_id_source = "tts"
# Synthesize audio and get URL
pipeline_id = self._resolve_pipeline()
pipeline = async_get_pipeline(self.hass, pipeline_id)

engine = tts.async_resolve_engine(self.hass, pipeline.tts_engine)
if engine is None:
raise HomeAssistantError(f"TTS engine {pipeline.tts_engine} not found")

tts_options: dict[str, Any] = {}
if pipeline.tts_voice is not None:
tts_options[tts.ATTR_VOICE] = pipeline.tts_voice
@ -491,14 +512,23 @@ class AssistSatelliteEntity(entity.Entity):
if self.tts_options is not None:
tts_options.update(self.tts_options)

media_id = tts_generate_media_source_id(
stream = tts.async_create_stream(
self.hass,
message,
engine=pipeline.tts_engine,
engine=engine,
language=pipeline.tts_language,
options=tts_options,
)
stream.async_set_message(message)

tts_token = stream.token
media_id = stream.url
original_media_id = tts.generate_media_source_id(
self.hass,
message,
engine=engine,
language=pipeline.tts_language,
options=tts_options,
)
original_media_id = media_id

if media_source.is_media_source_id(media_id):
if not media_id_source:
@ -516,6 +546,26 @@ class AssistSatelliteEntity(entity.Entity):
# Resolve to full URL
media_id = async_process_play_media_url(self.hass, media_id)

# Resolve preannounce media id
if preannounce_media_id:
if media_source.is_media_source_id(preannounce_media_id):
preannounce_media = await media_source.async_resolve_media(
self.hass,
preannounce_media_id,
None,
)
preannounce_media_id = preannounce_media.url

# Resolve to full URL
preannounce_media_id = async_process_play_media_url(
self.hass, preannounce_media_id
)

return AssistSatelliteAnnouncement(
message, media_id, original_media_id, media_id_source
message=message,
media_id=media_id,
original_media_id=original_media_id,
tts_token=tts_token,
media_id_source=media_id_source,
preannounce_media_id=preannounce_media_id,
)

@ -14,6 +14,10 @@ announce:
required: false
selector:
text:
preannounce_media_id:
required: false
selector:
text:
start_conversation:
target:
entity:
@ -34,3 +38,7 @@ start_conversation:
required: false
selector:
text:
preannounce_media_id:
required: false
selector:
text:

@ -23,6 +23,10 @@
"media_id": {
"name": "Media ID",
"description": "The media ID to announce instead of using text-to-speech."
},
"preannounce_media_id": {
"name": "Preannounce Media ID",
"description": "The media ID to play before the announcement."
}
}
},
@ -41,6 +45,10 @@
"extra_system_prompt": {
"name": "Extra system prompt",
"description": "Provide background information to the AI about the request."
},
"preannounce_media_id": {
"name": "Preannounce Media ID",
"description": "The media ID to play before the start message or media."
}
}
}

@ -66,28 +66,28 @@
"name": "Upload"
},
"load_avg_1m": {
"name": "Average load (1m)"
"name": "Average load (1 min)"
},
"load_avg_5m": {
"name": "Average load (5m)"
"name": "Average load (5 min)"
},
"load_avg_15m": {
"name": "Average load (15m)"
"name": "Average load (15 min)"
},
"24ghz_temperature": {
"name": "2.4GHz Temperature"
"name": "2.4GHz temperature"
},
"5ghz_temperature": {
"name": "5GHz Temperature"
"name": "5GHz temperature"
},
"cpu_temperature": {
"name": "CPU Temperature"
"name": "CPU temperature"
},
"5ghz_2_temperature": {
"name": "5GHz Temperature (Radio 2)"
"name": "5GHz temperature (Radio 2)"
},
"6ghz_temperature": {
"name": "6GHz Temperature"
"name": "6GHz temperature"
},
"cpu_usage": {
"name": "CPU usage"

@ -14,7 +14,7 @@
"personal_access_token": "Personal Access Token (PAT)"
},
"description": "Set up an Azure DevOps instance to access your project. A Personal Access Token is only required for a private project.",
"title": "Add Azure DevOps Project"
"title": "Add Azure DevOps project"
},
"reauth_confirm": {
"data": {
@ -32,7 +32,7 @@
"entity": {
"sensor": {
"build_id": {
"name": "{definition_name} latest build id"
"name": "{definition_name} latest build ID"
},
"finish_time": {
"name": "{definition_name} latest build finish time"
@ -59,7 +59,7 @@
"name": "{definition_name} latest build start time"
},
"url": {
"name": "{definition_name} latest build url"
"name": "{definition_name} latest build URL"
},
"work_item_count": {
"name": "{item_type} {item_state} work items"
@ -68,7 +68,7 @@
},
"exceptions": {
"authentication_failed": {
"message": "Could not authorize with Azure DevOps for {title}. You will need to update your personal access token."
"message": "Could not authorize with Azure DevOps for {title}. You will need to update your Personal Access Token."
}
}
}

@ -1,7 +1,9 @@
"""The Backup integration."""

from homeassistant.config_entries import SOURCE_SYSTEM
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers import config_validation as cv, discovery_flow
from homeassistant.helpers.backup import DATA_BACKUP
from homeassistant.helpers.hassio import is_hassio
from homeassistant.helpers.typing import ConfigType
@ -18,10 +20,12 @@ from .agent import (
)
from .config import BackupConfig, CreateBackupParametersDict
from .const import DATA_MANAGER, DOMAIN
from .coordinator import BackupConfigEntry, BackupDataUpdateCoordinator
from .http import async_register_http_views
from .manager import (
BackupManager,
BackupManagerError,
BackupPlatformEvent,
BackupPlatformProtocol,
BackupReaderWriter,
BackupReaderWriterError,
@ -52,6 +56,7 @@ __all__ = [
"BackupConfig",
"BackupManagerError",
"BackupNotFound",
"BackupPlatformEvent",
"BackupPlatformProtocol",
"BackupReaderWriter",
"BackupReaderWriterError",
@ -74,6 +79,8 @@ __all__ = [
"suggested_filename_from_name_date",
]

PLATFORMS = [Platform.SENSOR]

CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)


@ -128,4 +135,28 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

async_register_http_views(hass)

discovery_flow.async_create_flow(
hass, DOMAIN, context={"source": SOURCE_SYSTEM}, data={}
)

return True


async def async_setup_entry(hass: HomeAssistant, entry: BackupConfigEntry) -> bool:
"""Set up a config entry."""
backup_manager: BackupManager = hass.data[DATA_MANAGER]
coordinator = BackupDataUpdateCoordinator(hass, entry, backup_manager)
await coordinator.async_config_entry_first_refresh()

entry.async_on_unload(coordinator.async_unsubscribe)

entry.runtime_data = coordinator

await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

return True


async def async_unload_entry(hass: HomeAssistant, entry: BackupConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

21
homeassistant/components/backup/config_flow.py
Normal file
@ -0,0 +1,21 @@
"""Config flow for Home Assistant Backup integration."""

from __future__ import annotations

from typing import Any

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult

from .const import DOMAIN


class BackupConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Home Assistant Backup."""

VERSION = 1

async def async_step_system(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
return self.async_create_entry(title="Backup", data={})

@ -16,8 +16,8 @@ DATA_MANAGER: HassKey[BackupManager] = HassKey(DOMAIN)
LOGGER = getLogger(__package__)

EXCLUDE_FROM_BACKUP = [
"__pycache__/*",
".DS_Store",
"**/__pycache__/*",
"**/.DS_Store",
".HA_RESTORE",
"*.db-shm",
"*.log.*",

81
homeassistant/components/backup/coordinator.py
Normal file
@ -0,0 +1,81 @@
"""Coordinator for Home Assistant Backup integration."""

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.backup import (
async_subscribe_events,
async_subscribe_platform_events,
)
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

from .const import DOMAIN, LOGGER
from .manager import (
BackupManager,
BackupManagerState,
BackupPlatformEvent,
ManagerStateEvent,
)

type BackupConfigEntry = ConfigEntry[BackupDataUpdateCoordinator]


@dataclass
class BackupCoordinatorData:
"""Class to hold backup data."""

backup_manager_state: BackupManagerState
last_successful_automatic_backup: datetime | None
next_scheduled_automatic_backup: datetime | None


class BackupDataUpdateCoordinator(DataUpdateCoordinator[BackupCoordinatorData]):
"""Class to retrieve backup status."""

config_entry: ConfigEntry

def __init__(
self,
hass: HomeAssistant,
config_entry: ConfigEntry,
backup_manager: BackupManager,
) -> None:
"""Initialize coordinator."""
super().__init__(
hass,
LOGGER,
config_entry=config_entry,
name=DOMAIN,
update_interval=None,
)
self.unsubscribe: list[Callable[[], None]] = [
async_subscribe_events(hass, self._on_event),
async_subscribe_platform_events(hass, self._on_event),
]

self.backup_manager = backup_manager

@callback
def _on_event(self, event: ManagerStateEvent | BackupPlatformEvent) -> None:
"""Handle new event."""
LOGGER.debug("Received backup event: %s", event)
self.config_entry.async_create_task(self.hass, self.async_refresh())

async def _async_update_data(self) -> BackupCoordinatorData:
"""Update backup manager data."""
return BackupCoordinatorData(
self.backup_manager.state,
self.backup_manager.config.data.last_completed_automatic_backup,
self.backup_manager.config.data.schedule.next_automatic_backup,
)

@callback
def async_unsubscribe(self) -> None:
"""Unsubscribe from events."""
for unsub in self.unsubscribe:
unsub()
27
homeassistant/components/backup/diagnostics.py
Normal file
@ -0,0 +1,27 @@
"""Diagnostics support for Home Assistant Backup integration."""

from __future__ import annotations

from typing import Any

from homeassistant.components.diagnostics import async_redact_data
from homeassistant.const import CONF_PASSWORD
from homeassistant.core import HomeAssistant

from .coordinator import BackupConfigEntry


async def async_get_config_entry_diagnostics(
hass: HomeAssistant, entry: BackupConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
coordinator = entry.runtime_data
return {
"backup_agents": [
{"name": agent.name, "agent_id": agent.agent_id}
for agent in coordinator.backup_manager.backup_agents.values()
],
"backup_config": async_redact_data(
coordinator.backup_manager.config.data.to_dict(), [CONF_PASSWORD]
),
}
36
homeassistant/components/backup/entity.py
Normal file
@ -0,0 +1,36 @@
"""Base for backup entities."""

from __future__ import annotations

from homeassistant.const import __version__ as HA_VERSION
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .coordinator import BackupDataUpdateCoordinator


class BackupManagerEntity(CoordinatorEntity[BackupDataUpdateCoordinator]):
"""Base entity for backup manager."""

_attr_has_entity_name = True

def __init__(
self,
coordinator: BackupDataUpdateCoordinator,
entity_description: EntityDescription,
) -> None:
"""Initialize base entity."""
super().__init__(coordinator)
self.entity_description = entity_description
self._attr_unique_id = entity_description.key
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, "backup_manager")},
manufacturer="Home Assistant",
model="Home Assistant Backup",
sw_version=HA_VERSION,
name="Backup",
entry_type=DeviceEntryType.SERVICE,
configuration_url="homeassistant://config/backup",
)

@ -120,6 +120,7 @@ class BackupManagerState(StrEnum):

IDLE = "idle"
CREATE_BACKUP = "create_backup"
BLOCKED = "blocked"
RECEIVE_BACKUP = "receive_backup"
RESTORE_BACKUP = "restore_backup"

@ -228,6 +229,20 @@ class RestoreBackupEvent(ManagerStateEvent):
state: RestoreBackupState


@dataclass(frozen=True, kw_only=True, slots=True)
class BackupPlatformEvent:
"""Backup platform class."""

domain: str


@dataclass(frozen=True, kw_only=True, slots=True)
class BlockedEvent(ManagerStateEvent):
"""Backup manager blocked, Home Assistant is starting."""

manager_state: BackupManagerState = BackupManagerState.BLOCKED


class BackupPlatformProtocol(Protocol):
"""Define the format that backup platforms can have."""

@ -342,11 +357,14 @@ class BackupManager:
self.remove_next_delete_event: Callable[[], None] | None = None

# Latest backup event and backup event subscribers
self.last_event: ManagerStateEvent = IdleEvent()
self.last_non_idle_event: ManagerStateEvent | None = None
self.last_event: ManagerStateEvent = BlockedEvent()
self.last_action_event: ManagerStateEvent | None = None
self._backup_event_subscriptions = hass.data[
DATA_BACKUP
].backup_event_subscriptions
self._backup_platform_event_subscriptions = hass.data[
DATA_BACKUP
].backup_platform_event_subscriptions

async def async_setup(self) -> None:
"""Set up the backup manager."""
@ -356,10 +374,19 @@ class BackupManager:
self.known_backups.load(stored["backups"])

await self._reader_writer.async_validate_config(config=self.config)

await self._reader_writer.async_resume_restore_progress_after_restart(
on_progress=self.async_on_backup_event
)

async def set_manager_idle_after_start(hass: HomeAssistant) -> None:
"""Set manager to idle after start."""
self.async_on_backup_event(IdleEvent())

if self.state == BackupManagerState.BLOCKED:
# If we're not finishing a restore job, set the manager to idle after start
start.async_at_started(self.hass, set_manager_idle_after_start)

await self.load_platforms()

@property
@ -448,6 +475,9 @@ class BackupManager:
LOGGER.debug("%s platforms loaded in total", len(self.platforms))
LOGGER.debug("%s agents loaded in total", len(self.backup_agents))
LOGGER.debug("%s local agents loaded in total", len(self.local_backup_agents))
event = BackupPlatformEvent(domain=integration_domain)
for subscription in self._backup_platform_event_subscriptions:
subscription(event)

async def async_pre_backup_actions(self) -> None:
"""Perform pre backup actions."""
@ -1319,8 +1349,8 @@ class BackupManager:
if (current_state := self.state) != (new_state := event.manager_state):
LOGGER.debug("Backup state: %s -> %s", current_state, new_state)
self.last_event = event
if not isinstance(event, IdleEvent):
self.last_non_idle_event = event
if not isinstance(event, (BlockedEvent, IdleEvent)):
self.last_action_event = event
for subscription in self._backup_event_subscriptions:
subscription(event)

@ -1696,7 +1726,9 @@ class CoreBackupReaderWriter(BackupReaderWriter):
"""Filter to filter excludes."""

for exclude in excludes:
if not path.match(exclude):
# The home assistant core configuration directory is added as "data"
# in the tar file, so we need to prefix that path to the filters.
if not path.full_match(f"data/{exclude}"):
continue
LOGGER.debug("Ignoring %s because of %s", path, exclude)
return True

@ -5,8 +5,9 @@
"codeowners": ["@home-assistant/core"],
"dependencies": ["http", "websocket_api"],
"documentation": "https://www.home-assistant.io/integrations/backup",
"integration_type": "system",
"integration_type": "service",
"iot_class": "calculated",
"quality_scale": "internal",
"requirements": ["cronsim==2.6", "securetar==2025.2.1"]
"requirements": ["cronsim==2.6", "securetar==2025.2.1"],
"single_config_entry": true
}

75
homeassistant/components/backup/sensor.py
Normal file
@ -0,0 +1,75 @@
"""Sensor platform for Home Assistant Backup integration."""

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime

from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import BackupConfigEntry, BackupCoordinatorData
from .entity import BackupManagerEntity
from .manager import BackupManagerState


@dataclass(kw_only=True, frozen=True)
class BackupSensorEntityDescription(SensorEntityDescription):
"""Description for Home Assistant Backup sensor entities."""

value_fn: Callable[[BackupCoordinatorData], str | datetime | None]


BACKUP_MANAGER_DESCRIPTIONS = (
BackupSensorEntityDescription(
key="backup_manager_state",
translation_key="backup_manager_state",
device_class=SensorDeviceClass.ENUM,
options=[state.value for state in BackupManagerState],
value_fn=lambda data: data.backup_manager_state,
),
BackupSensorEntityDescription(
key="next_scheduled_automatic_backup",
translation_key="next_scheduled_automatic_backup",
device_class=SensorDeviceClass.TIMESTAMP,
value_fn=lambda data: data.next_scheduled_automatic_backup,
),
BackupSensorEntityDescription(
key="last_successful_automatic_backup",
translation_key="last_successful_automatic_backup",
device_class=SensorDeviceClass.TIMESTAMP,
value_fn=lambda data: data.last_successful_automatic_backup,
),
)


async def async_setup_entry(
hass: HomeAssistant,
config_entry: BackupConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Sensor set up for backup config entry."""

coordinator = config_entry.runtime_data

async_add_entities(
BackupManagerSensor(coordinator, description)
for description in BACKUP_MANAGER_DESCRIPTIONS
)


class BackupManagerSensor(BackupManagerEntity, SensorEntity):
"""Sensor to track backup manager state."""

entity_description: BackupSensorEntityDescription

@property
def native_value(self) -> str | datetime | None:
"""Return native value of entity."""
return self.entity_description.value_fn(self.coordinator.data)

@ -22,5 +22,24 @@
|
||||
"name": "Create automatic backup",
|
||||
"description": "Creates a new backup with automatic backup settings."
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"sensor": {
|
||||
"backup_manager_state": {
|
||||
"name": "Backup Manager State",
|
||||
"state": {
|
||||
"idle": "Idle",
|
||||
"create_backup": "Creating a backup",
|
||||
"receive_backup": "Receiving a backup",
|
||||
"restore_backup": "Restoring a backup"
|
||||
}
|
||||
},
|
||||
"next_scheduled_automatic_backup": {
|
||||
"name": "Next scheduled automatic backup"
|
||||
},
|
||||
"last_successful_automatic_backup": {
|
||||
"name": "Last successful automatic backup"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -55,7 +55,7 @@ async def handle_info(
|
||||
"backups": list(backups.values()),
|
||||
"last_attempted_automatic_backup": manager.config.data.last_attempted_automatic_backup,
|
||||
"last_completed_automatic_backup": manager.config.data.last_completed_automatic_backup,
|
||||
"last_non_idle_event": manager.last_non_idle_event,
|
||||
"last_action_event": manager.last_action_event,
|
||||
"next_automatic_backup": manager.config.data.schedule.next_automatic_backup,
|
||||
"next_automatic_backup_additional": manager.config.data.schedule.next_automatic_backup_additional,
|
||||
"state": manager.state,
|
||||
|
@ -23,7 +23,7 @@
|
||||
"entity": {
|
||||
"climate": {
|
||||
"auto_comfort": {
|
||||
"name": "Auto comfort"
|
||||
"name": "Auto Comfort"
|
||||
}
|
||||
},
|
||||
"fan": {
|
||||
@ -39,25 +39,25 @@
    },
    "number": {
      "comfort_min_speed": {
        "name": "Auto Comfort Minimum Speed"
        "name": "Auto Comfort minimum speed"
      },
      "comfort_max_speed": {
        "name": "Auto Comfort Maximum Speed"
        "name": "Auto Comfort maximum speed"
      },
      "comfort_heat_assist_speed": {
        "name": "Auto Comfort Heat Assist Speed"
        "name": "Auto Comfort Heat Assist speed"
      },
      "return_to_auto_timeout": {
        "name": "Return to Auto Timeout"
        "name": "Return to Auto timeout"
      },
      "motion_sense_timeout": {
        "name": "Motion Sense Timeout"
        "name": "Motion sense timeout"
      },
      "light_return_to_auto_timeout": {
        "name": "Light Return to Auto Timeout"
        "name": "Light return to Auto timeout"
      },
      "light_auto_motion_timeout": {
        "name": "Light Motion Sense Timeout"
        "name": "Light motion sense timeout"
      }
    },
    "sensor": {
@ -76,10 +76,10 @@
    },
    "switch": {
      "legacy_ir_remote_enable": {
        "name": "Legacy IR Remote"
        "name": "Legacy IR remote"
      },
      "led_indicators_enable": {
        "name": "Led Indicators"
        "name": "LED indicators"
      },
      "comfort_heat_assist_enable": {
        "name": "Auto Comfort Heat Assist"
@ -88,10 +88,10 @@
        "name": "Beep"
      },
      "eco_enable": {
        "name": "Eco Mode"
        "name": "Eco mode"
      },
      "motion_sense_enable": {
        "name": "Motion Sense"
        "name": "Motion sense"
      },
      "return_to_auto_enable": {
        "name": "Return to Auto"
@ -103,7 +103,7 @@
        "name": "Dim to Warm"
      },
      "light_return_to_auto_enable": {
        "name": "Light Return to Auto"
        "name": "Light return to Auto"
      }
    }
  }
@ -274,7 +274,7 @@
      "message": "An error occurred while attempting to play {media_type}: {error_message}."
    },
    "invalid_grouping_entity": {
      "message": "Entity with id: {entity_id} can't be added to the Beolink session. Is the entity a Bang & Olufsen media_player?"
      "message": "Entity with ID {entity_id} can't be added to the Beolink session. Is the entity a Bang & Olufsen media_player?"
    },
    "invalid_sound_mode": {
      "message": "{invalid_sound_mode} is an invalid sound mode. Valid values are: {valid_sound_modes}."
@ -75,6 +75,9 @@ class BluesoundConfigFlow(ConfigFlow, domain=DOMAIN):
        self, discovery_info: ZeroconfServiceInfo
    ) -> ConfigFlowResult:
        """Handle a flow initialized by zeroconf discovery."""
        # The player can have an IPv6 address, but the API is only available on IPv4
        if discovery_info.ip_address.version != 4:
            return self.async_abort(reason="no_ipv4_address")
        if discovery_info.port is not None:
            self._port = discovery_info.port
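The IPv4 guard above can be checked in isolation with the standard library; a quick sketch:

from ipaddress import ip_address

# An IPv6 discovery address would make the flow abort with no_ipv4_address.
print(ip_address("fe80::1").version != 4)  # True -> abort
print(ip_address("192.0.2.15").version != 4)  # False -> continue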
@ -19,7 +19,8 @@
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]"
      "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
      "no_ipv4_address": "No IPv4 address found."
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
@ -18,9 +18,9 @@
    "bleak==0.22.3",
    "bleak-retry-connector==3.9.0",
    "bluetooth-adapters==0.21.4",
    "bluetooth-auto-recovery==1.4.4",
    "bluetooth-data-tools==1.25.0",
    "dbus-fast==2.39.3",
    "habluetooth==3.25.0"
    "bluetooth-auto-recovery==1.4.5",
    "bluetooth-data-tools==1.26.1",
    "dbus-fast==2.43.0",
    "habluetooth==3.37.0"
  ]
}
62
homeassistant/components/bosch_alarm/__init__.py
Normal file
@ -0,0 +1,62 @@
"""The Bosch Alarm integration."""

from __future__ import annotations

from ssl import SSLError

from bosch_alarm_mode2 import Panel

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr

from .const import CONF_INSTALLER_CODE, CONF_USER_CODE, DOMAIN

PLATFORMS: list[Platform] = [Platform.ALARM_CONTROL_PANEL]

type BoschAlarmConfigEntry = ConfigEntry[Panel]


async def async_setup_entry(hass: HomeAssistant, entry: BoschAlarmConfigEntry) -> bool:
    """Set up Bosch Alarm from a config entry."""

    panel = Panel(
        host=entry.data[CONF_HOST],
        port=entry.data[CONF_PORT],
        automation_code=entry.data.get(CONF_PASSWORD),
        installer_or_user_code=entry.data.get(
            CONF_INSTALLER_CODE, entry.data.get(CONF_USER_CODE)
        ),
    )
    try:
        await panel.connect()
    except (PermissionError, ValueError) as err:
        await panel.disconnect()
        raise ConfigEntryNotReady from err
    except (TimeoutError, OSError, ConnectionRefusedError, SSLError) as err:
        await panel.disconnect()
        raise ConfigEntryNotReady("Connection failed") from err

    entry.runtime_data = panel

    device_registry = dr.async_get(hass)

    device_registry.async_get_or_create(
        config_entry_id=entry.entry_id,
        identifiers={(DOMAIN, entry.unique_id or entry.entry_id)},
        name=f"Bosch {panel.model}",
        manufacturer="Bosch Security Systems",
        model=panel.model,
        sw_version=panel.firmware_version,
    )
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True


async def async_unload_entry(hass: HomeAssistant, entry: BoschAlarmConfigEntry) -> bool:
    """Unload a config entry."""
    if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
        await entry.runtime_data.disconnect()
    return unload_ok
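The new integration stores its connected Panel client directly on the config entry through the typed runtime_data alias. A minimal sketch of that pattern (requires Python 3.12+ for the type-alias and generic syntax; the ConfigEntry/Panel classes below are simplified placeholders, not the real Home Assistant classes):

from dataclasses import dataclass, field
from typing import Any


@dataclass
class Panel:
    """Placeholder for the bosch_alarm_mode2 client."""

    model: str = "Solution 3000"


@dataclass
class ConfigEntry[T]:
    data: dict[str, Any]
    runtime_data: T = field(init=False)


type BoschAlarmConfigEntry = ConfigEntry[Panel]


def setup(entry: BoschAlarmConfigEntry) -> None:
    # Platforms later read entry.runtime_data; type checkers know it is a Panel.
    entry.runtime_data = Panel()


entry: BoschAlarmConfigEntry = ConfigEntry(data={"host": "192.0.2.1"})
setup(entry)
print(entry.runtime_data.model)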
109
homeassistant/components/bosch_alarm/alarm_control_panel.py
Normal file
@ -0,0 +1,109 @@
"""Support for Bosch Alarm Panel."""

from __future__ import annotations

from bosch_alarm_mode2 import Panel

from homeassistant.components.alarm_control_panel import (
    AlarmControlPanelEntity,
    AlarmControlPanelEntityFeature,
    AlarmControlPanelState,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import BoschAlarmConfigEntry
from .const import DOMAIN


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: BoschAlarmConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up control panels for each area."""
    panel = config_entry.runtime_data

    async_add_entities(
        AreaAlarmControlPanel(
            panel,
            area_id,
            config_entry.unique_id or config_entry.entry_id,
        )
        for area_id in panel.areas
    )


class AreaAlarmControlPanel(AlarmControlPanelEntity):
    """An alarm control panel entity for a Bosch alarm panel."""

    _attr_has_entity_name = True
    _attr_supported_features = (
        AlarmControlPanelEntityFeature.ARM_HOME
        | AlarmControlPanelEntityFeature.ARM_AWAY
    )
    _attr_code_arm_required = False
    _attr_name = None

    def __init__(self, panel: Panel, area_id: int, unique_id: str) -> None:
        """Initialise a Bosch Alarm control panel entity."""
        self.panel = panel
        self._area = panel.areas[area_id]
        self._area_id = area_id
        self._attr_unique_id = f"{unique_id}_area_{area_id}"
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, self._attr_unique_id)},
            name=self._area.name,
            manufacturer="Bosch Security Systems",
            via_device=(
                DOMAIN,
                unique_id,
            ),
        )

    @property
    def alarm_state(self) -> AlarmControlPanelState | None:
        """Return the state of the alarm."""
        if self._area.is_triggered():
            return AlarmControlPanelState.TRIGGERED
        if self._area.is_disarmed():
            return AlarmControlPanelState.DISARMED
        if self._area.is_arming():
            return AlarmControlPanelState.ARMING
        if self._area.is_pending():
            return AlarmControlPanelState.PENDING
        if self._area.is_part_armed():
            return AlarmControlPanelState.ARMED_HOME
        if self._area.is_all_armed():
            return AlarmControlPanelState.ARMED_AWAY
        return None

    async def async_alarm_disarm(self, code: str | None = None) -> None:
        """Disarm this panel."""
        await self.panel.area_disarm(self._area_id)

    async def async_alarm_arm_home(self, code: str | None = None) -> None:
        """Send arm home command."""
        await self.panel.area_arm_part(self._area_id)

    async def async_alarm_arm_away(self, code: str | None = None) -> None:
        """Send arm away command."""
        await self.panel.area_arm_all(self._area_id)

    @property
    def available(self) -> bool:
        """Return True if entity is available."""
        return self.panel.connection_status()

    async def async_added_to_hass(self) -> None:
        """Run when entity attached to hass."""
        await super().async_added_to_hass()
        self._area.status_observer.attach(self.schedule_update_ha_state)
        self.panel.connection_status_observer.attach(self.schedule_update_ha_state)

    async def async_will_remove_from_hass(self) -> None:
        """Run when entity removed from hass."""
        await super().async_will_remove_from_hass()
        self._area.status_observer.detach(self.schedule_update_ha_state)
        self.panel.connection_status_observer.detach(self.schedule_update_ha_state)
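The entity is push-based: it attaches schedule_update_ha_state to the library's status observers instead of polling. A standalone sketch of that observer wiring (the Observer class here is a hypothetical stand-in for bosch_alarm_mode2's observer objects):

from collections.abc import Callable


class Observer:
    """Tiny observable: callbacks are attached, detached, and notified."""

    def __init__(self) -> None:
        self._callbacks: list[Callable[[], None]] = []

    def attach(self, callback: Callable[[], None]) -> None:
        self._callbacks.append(callback)

    def detach(self, callback: Callable[[], None]) -> None:
        self._callbacks.remove(callback)

    def notify(self) -> None:
        for callback in list(self._callbacks):
            callback()


status_observer = Observer()
status_observer.attach(lambda: print("entity state refreshed"))
status_observer.notify()  # the panel library would call this on a status change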
165
homeassistant/components/bosch_alarm/config_flow.py
Normal file
@ -0,0 +1,165 @@
"""Config flow for Bosch Alarm integration."""

from __future__ import annotations

import asyncio
import logging
import ssl
from typing import Any

from bosch_alarm_mode2 import Panel
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import (
    CONF_CODE,
    CONF_HOST,
    CONF_MODEL,
    CONF_PASSWORD,
    CONF_PORT,
)
import homeassistant.helpers.config_validation as cv

from .const import CONF_INSTALLER_CODE, CONF_USER_CODE, DOMAIN

_LOGGER = logging.getLogger(__name__)

STEP_USER_DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_HOST): str,
        vol.Required(CONF_PORT, default=7700): cv.positive_int,
    }
)

STEP_AUTH_DATA_SCHEMA_SOLUTION = vol.Schema(
    {
        vol.Required(CONF_USER_CODE): str,
    }
)

STEP_AUTH_DATA_SCHEMA_AMAX = vol.Schema(
    {
        vol.Required(CONF_INSTALLER_CODE): str,
        vol.Required(CONF_PASSWORD): str,
    }
)

STEP_AUTH_DATA_SCHEMA_BG = vol.Schema(
    {
        vol.Required(CONF_PASSWORD): str,
    }
)

STEP_INIT_DATA_SCHEMA = vol.Schema({vol.Optional(CONF_CODE): str})


async def try_connect(
    data: dict[str, Any], load_selector: int = 0
) -> tuple[str, int | None]:
    """Validate the user input allows us to connect.

    Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user.
    """
    panel = Panel(
        host=data[CONF_HOST],
        port=data[CONF_PORT],
        automation_code=data.get(CONF_PASSWORD),
        installer_or_user_code=data.get(CONF_INSTALLER_CODE, data.get(CONF_USER_CODE)),
    )

    try:
        await panel.connect(load_selector)
    finally:
        await panel.disconnect()

    return (panel.model, panel.serial_number)


class BoschAlarmConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Bosch Alarm."""

    def __init__(self) -> None:
        """Init config flow."""

        self._data: dict[str, Any] = {}

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step."""
        errors: dict[str, str] = {}

        if user_input is not None:
            try:
                # Use load_selector = 0 to fetch the panel model without authentication.
                (model, serial) = await try_connect(user_input, 0)
            except (
                OSError,
                ConnectionRefusedError,
                ssl.SSLError,
                asyncio.exceptions.TimeoutError,
            ) as e:
                _LOGGER.error("Connection Error: %s", e)
                errors["base"] = "cannot_connect"
            except Exception:
                _LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"
            else:
                self._data = user_input
                self._data[CONF_MODEL] = model
                return await self.async_step_auth()
        return self.async_show_form(
            step_id="user",
            data_schema=self.add_suggested_values_to_schema(
                STEP_USER_DATA_SCHEMA, user_input
            ),
            errors=errors,
        )

    async def async_step_auth(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the auth step."""
        errors: dict[str, str] = {}

        # Each model variant requires a different authentication flow
        if "Solution" in self._data[CONF_MODEL]:
            schema = STEP_AUTH_DATA_SCHEMA_SOLUTION
        elif "AMAX" in self._data[CONF_MODEL]:
            schema = STEP_AUTH_DATA_SCHEMA_AMAX
        else:
            schema = STEP_AUTH_DATA_SCHEMA_BG

        if user_input is not None:
            self._data.update(user_input)
            try:
                (model, serial_number) = await try_connect(
                    self._data, Panel.LOAD_EXTENDED_INFO
                )
            except (PermissionError, ValueError) as e:
                errors["base"] = "invalid_auth"
                _LOGGER.error("Authentication Error: %s", e)
            except (
                OSError,
                ConnectionRefusedError,
                ssl.SSLError,
                TimeoutError,
            ) as e:
                _LOGGER.error("Connection Error: %s", e)
                errors["base"] = "cannot_connect"
            except Exception:
                _LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"
            else:
                if serial_number:
                    await self.async_set_unique_id(str(serial_number))
                    self._abort_if_unique_id_configured()
                else:
                    self._async_abort_entries_match({CONF_HOST: self._data[CONF_HOST]})
                return self.async_create_entry(title=f"Bosch {model}", data=self._data)

        return self.async_show_form(
            step_id="auth",
            data_schema=self.add_suggested_values_to_schema(schema, user_input),
            errors=errors,
        )
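async_step_auth picks its schema from the panel model reported by the unauthenticated probe. The dispatch reduces to a small pure function; a sketch with hypothetical return labels standing in for the schemas:

def pick_auth_schema(model: str) -> str:
    if "Solution" in model:
        return "user code only"
    if "AMAX" in model:
        return "installer code + automation code"
    return "automation code only"  # B/G series panels


for model in ("Solution 2000", "AMAX 3000", "B5512"):
    print(model, "->", pick_auth_schema(model))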
6
homeassistant/components/bosch_alarm/const.py
Normal file
@ -0,0 +1,6 @@
"""Constants for the Bosch Alarm integration."""

DOMAIN = "bosch_alarm"
HISTORY_ATTR = "history"
CONF_INSTALLER_CODE = "installer_code"
CONF_USER_CODE = "user_code"
11
homeassistant/components/bosch_alarm/manifest.json
Normal file
@ -0,0 +1,11 @@
{
  "domain": "bosch_alarm",
  "name": "Bosch Alarm",
  "codeowners": ["@mag1024", "@sanjay900"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/bosch_alarm",
  "integration_type": "device",
  "iot_class": "local_push",
  "quality_scale": "bronze",
  "requirements": ["bosch-alarm-mode2==0.4.3"]
}
84
homeassistant/components/bosch_alarm/quality_scale.yaml
Normal file
@ -0,0 +1,84 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: |
      No custom actions defined
  appropriate-polling:
    status: exempt
    comment: |
      No polling
  brands: done
  common-modules: done
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: |
      No custom actions are defined.
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup: done
  entity-unique-id: done
  has-entity-name: done
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done

  # Silver
  action-exceptions:
    status: exempt
    comment: |
      No custom actions are defined.
  config-entry-unloading: done
  docs-configuration-parameters: todo
  docs-installation-parameters: todo
  entity-unavailable: todo
  integration-owner: done
  log-when-unavailable: todo
  parallel-updates: todo
  reauthentication-flow: todo
  test-coverage: done

  # Gold
  devices: done
  diagnostics: todo
  discovery-update-info: todo
  discovery: todo
  docs-data-update: todo
  docs-examples: todo
  docs-known-limitations: todo
  docs-supported-devices: todo
  docs-supported-functions: todo
  docs-troubleshooting: todo
  docs-use-cases: todo
  dynamic-devices:
    status: exempt
    comment: |
      Device type integration
  entity-category: todo
  entity-device-class: todo
  entity-disabled-by-default: todo
  entity-translations: todo
  exception-translations: todo
  icon-translations: todo
  reconfiguration-flow: todo
  repair-issues:
    status: exempt
    comment: |
      No repairs
  stale-devices:
    status: exempt
    comment: |
      Device type integration

  # Platinum
  async-dependency: done
  inject-websession:
    status: exempt
    comment: |
      Integration does not make any HTTP requests.
  strict-typing: done
36
homeassistant/components/bosch_alarm/strings.json
Normal file
@ -0,0 +1,36 @@
{
  "config": {
    "step": {
      "user": {
        "data": {
          "host": "[%key:common::config_flow::data::host%]",
          "port": "[%key:common::config_flow::data::port%]"
        },
        "data_description": {
          "host": "The hostname or IP address of your Bosch alarm panel",
          "port": "The port used to connect to your Bosch alarm panel. This is usually 7700"
        }
      },
      "auth": {
        "data": {
          "password": "[%key:common::config_flow::data::password%]",
          "installer_code": "Installer code",
          "user_code": "User code"
        },
        "data_description": {
          "password": "The Mode 2 automation code from your panel",
          "installer_code": "The installer code from your panel",
          "user_code": "The user code from your panel"
        }
      }
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
    }
  }
}
@ -4,10 +4,14 @@ from __future__ import annotations

from typing import Any

from homeassistant.components.diagnostics import async_redact_data
from homeassistant.const import CONF_EMAIL, CONF_NAME
from homeassistant.core import HomeAssistant

from .coordinator import BringConfigEntry

TO_REDACT = {CONF_NAME, CONF_EMAIL}


async def async_get_config_entry_diagnostics(
    hass: HomeAssistant, config_entry: BringConfigEntry
@ -15,7 +19,10 @@ async def async_get_config_entry_diagnostics(
    """Return diagnostics for a config entry."""

    return {
        "data": {k: v.to_dict() for k, v in config_entry.runtime_data.data.items()},
        "data": {
            k: async_redact_data(v.to_dict(), TO_REDACT)
            for k, v in config_entry.runtime_data.data.items()
        },
        "lists": [lst.to_dict() for lst in config_entry.runtime_data.lists],
        "user_settings": config_entry.runtime_data.user_settings.to_dict(),
    }
@ -77,9 +77,12 @@ class BringEventEntity(BringBaseEntity, EventEntity):
        attributes = asdict(activity.content)

        attributes["last_activity_by"] = next(
            x.name
            for x in bring_list.users.users
            if x.publicUuid == activity.content.publicUserUuid
            (
                x.name
                for x in bring_list.users.users
                if x.publicUuid == activity.content.publicUserUuid
            ),
            None,
        )

        self._trigger_event(
@ -8,5 +8,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["bring_api"],
  "quality_scale": "platinum",
  "requirements": ["bring-api==1.0.2"]
  "requirements": ["bring-api==1.1.0"]
}
@ -9,6 +9,7 @@ from homeassistant.const import CONF_HOST, CONF_TYPE, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady

from .const import DOMAIN
from .coordinator import BrotherConfigEntry, BrotherDataUpdateCoordinator

PLATFORMS = [Platform.SENSOR]
@ -25,7 +26,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: BrotherConfigEntry) -> b
            host, printer_type=printer_type, snmp_engine=snmp_engine
        )
    except (ConnectionError, SnmpError, TimeoutError) as error:
        raise ConfigEntryNotReady from error
        raise ConfigEntryNotReady(
            translation_domain=DOMAIN,
            translation_key="cannot_connect",
            translation_placeholders={
                "device": entry.title,
                "error": repr(error),
            },
        ) from error

    coordinator = BrotherDataUpdateCoordinator(hass, entry, brother)
    await coordinator.async_config_entry_first_refresh()
@ -26,6 +26,7 @@ class BrotherDataUpdateCoordinator(DataUpdateCoordinator[BrotherSensors]):
    ) -> None:
        """Initialize."""
        self.brother = brother
        self.device_name = config_entry.title

        super().__init__(
            hass,
@ -41,5 +42,12 @@ class BrotherDataUpdateCoordinator(DataUpdateCoordinator[BrotherSensors]):
            async with timeout(20):
                data = await self.brother.async_update()
        except (ConnectionError, SnmpError, UnsupportedModelError) as error:
            raise UpdateFailed(error) from error
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="update_error",
                translation_placeholders={
                    "device": self.device_name,
                    "error": repr(error),
                },
            ) from error
        return data
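Both Brother hunks swap a bare exception message for a translation key plus placeholders, which the frontend resolves against the strings.json entries added below. A standalone sketch of the mechanism using a stand-in exception class (the real code raises Home Assistant's ConfigEntryNotReady/UpdateFailed):

class UpdateFailed(Exception):
    """Stand-in mirroring the translation-aware exception signature."""

    def __init__(
        self,
        *args: object,
        translation_domain: str | None = None,
        translation_key: str | None = None,
        translation_placeholders: dict[str, str] | None = None,
    ) -> None:
        super().__init__(*args)
        self.translation_domain = translation_domain
        self.translation_key = translation_key
        self.translation_placeholders = translation_placeholders


try:
    raise UpdateFailed(
        translation_domain="brother",
        translation_key="update_error",
        translation_placeholders={"device": "HL-L2350DW", "error": "timeout"},
    )
except UpdateFailed as err:
    # The frontend renders exceptions.update_error with the placeholders filled in.
    print(err.translation_key, err.translation_placeholders)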
@ -159,5 +159,13 @@
        "name": "Last restart"
      }
    }
  },
  "exceptions": {
    "cannot_connect": {
      "message": "An error occurred while connecting to the {device} printer: {error}"
    },
    "update_error": {
      "message": "An error occurred while retrieving data from the {device} printer: {error}"
    }
  }
}
@ -170,6 +170,7 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
        native_unit_of_measurement=DEGREE,
        icon="mdi:compass-outline",
        device_class=SensorDeviceClass.WIND_DIRECTION,
        state_class=SensorStateClass.MEASUREMENT_ANGLE,
    ),
    SensorEntityDescription(
        key="pressure",
@ -21,8 +21,8 @@
    "step": {
      "init": {
        "data": {
          "ffmpeg_arguments": "Arguments passed to ffmpeg for cameras",
          "timeout": "Request Timeout (seconds)"
          "ffmpeg_arguments": "Arguments passed to FFmpeg for cameras",
          "timeout": "Request timeout (seconds)"
        }
      }
    }
@ -16,12 +16,21 @@ from homeassistant.config_entries import (
from homeassistant.const import CONF_UUID
from homeassistant.core import callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo

from .const import CONF_IGNORE_CEC, CONF_KNOWN_HOSTS, DOMAIN

IGNORE_CEC_SCHEMA = vol.Schema(vol.All(cv.ensure_list, [cv.string]))
KNOWN_HOSTS_SCHEMA = vol.Schema(vol.All(cv.ensure_list, [cv.string]))
KNOWN_HOSTS_SCHEMA = vol.Schema(
    {
        vol.Optional(
            CONF_KNOWN_HOSTS,
        ): SelectSelector(
            SelectSelectorConfig(custom_value=True, options=[], multiple=True),
        )
    }
)
WANTED_UUID_SCHEMA = vol.Schema(vol.All(cv.ensure_list, [cv.string]))


@ -30,12 +39,6 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):

    VERSION = 1

    def __init__(self) -> None:
        """Initialize flow."""
        self._ignore_cec = set[str]()
        self._known_hosts = set[str]()
        self._wanted_uuid = set[str]()

    @staticmethod
    @callback
    def async_get_options_flow(
@ -62,48 +65,31 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Confirm the setup."""
        errors = {}
        data = {CONF_KNOWN_HOSTS: self._known_hosts}

        if user_input is not None:
            bad_hosts = False
            known_hosts = user_input[CONF_KNOWN_HOSTS]
            known_hosts = [x.strip() for x in known_hosts.split(",") if x.strip()]
            try:
                known_hosts = KNOWN_HOSTS_SCHEMA(known_hosts)
            except vol.Invalid:
                errors["base"] = "invalid_known_hosts"
                bad_hosts = True
            else:
                self._known_hosts = known_hosts
                data = self._get_data()
            if not bad_hosts:
                return self.async_create_entry(title="Google Cast", data=data)
            known_hosts = _trim_items(user_input.get(CONF_KNOWN_HOSTS, []))
            return self.async_create_entry(
                title="Google Cast",
                data=self._get_data(known_hosts=known_hosts),
            )

        fields = {}
        fields[vol.Optional(CONF_KNOWN_HOSTS, default="")] = str

        return self.async_show_form(
            step_id="config", data_schema=vol.Schema(fields), errors=errors
        )
        return self.async_show_form(step_id="config", data_schema=KNOWN_HOSTS_SCHEMA)

    async def async_step_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Confirm the setup."""

        data = self._get_data()

        if user_input is not None or not onboarding.async_is_onboarded(self.hass):
            return self.async_create_entry(title="Google Cast", data=data)
            return self.async_create_entry(title="Google Cast", data=self._get_data())

        return self.async_show_form(step_id="confirm")

    def _get_data(self):
    def _get_data(
        self, *, known_hosts: list[str] | None = None
    ) -> dict[str, list[str]]:
        return {
            CONF_IGNORE_CEC: list(self._ignore_cec),
            CONF_KNOWN_HOSTS: list(self._known_hosts),
            CONF_UUID: list(self._wanted_uuid),
            CONF_IGNORE_CEC: [],
            CONF_KNOWN_HOSTS: known_hosts or [],
            CONF_UUID: [],
        }
@ -123,31 +109,24 @@ class CastOptionsFlowHandler(OptionsFlow):
    ) -> ConfigFlowResult:
        """Manage the Google Cast options."""
        errors: dict[str, str] = {}
        current_config = self.config_entry.data
        if user_input is not None:
            bad_hosts, known_hosts = _string_to_list(
                user_input.get(CONF_KNOWN_HOSTS, ""), KNOWN_HOSTS_SCHEMA
            known_hosts = _trim_items(user_input.get(CONF_KNOWN_HOSTS, []))
            self.updated_config = dict(self.config_entry.data)
            self.updated_config[CONF_KNOWN_HOSTS] = known_hosts

            if self.show_advanced_options:
                return await self.async_step_advanced_options()

            self.hass.config_entries.async_update_entry(
                self.config_entry, data=self.updated_config
            )

            if not bad_hosts:
                self.updated_config = dict(current_config)
                self.updated_config[CONF_KNOWN_HOSTS] = known_hosts

                if self.show_advanced_options:
                    return await self.async_step_advanced_options()

                self.hass.config_entries.async_update_entry(
                    self.config_entry, data=self.updated_config
                )
                return self.async_create_entry(title="", data={})

        fields: dict[vol.Marker, type[str]] = {}
        suggested_value = _list_to_string(current_config.get(CONF_KNOWN_HOSTS))
        _add_with_suggestion(fields, CONF_KNOWN_HOSTS, suggested_value)
            return self.async_create_entry(title="", data={})

        return self.async_show_form(
            step_id="basic_options",
            data_schema=vol.Schema(fields),
            data_schema=self.add_suggested_values_to_schema(
                KNOWN_HOSTS_SCHEMA, self.config_entry.data
            ),
            errors=errors,
            last_step=not self.show_advanced_options,
        )
@ -206,6 +185,10 @@ def _string_to_list(string, schema):
    return invalid, items


def _trim_items(items: list[str]) -> list[str]:
    return [x.strip() for x in items if x.strip()]


def _add_with_suggestion(
    fields: dict[vol.Marker, type[str]], key: str, suggested_value: str
) -> None:
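The new _trim_items helper replaces the old comma-splitting logic, since the multi-select selector already returns a list. Its behavior, copied from the diff, with a quick usage check:

def _trim_items(items: list[str]) -> list[str]:
    return [x.strip() for x in items if x.strip()]


print(_trim_items([" 192.168.1.10 ", "", "cast.local "]))
# ['192.168.1.10', 'cast.local']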
@ -2,7 +2,7 @@

from __future__ import annotations

from typing import TYPE_CHECKING, TypedDict
from typing import TYPE_CHECKING, NotRequired, TypedDict

from homeassistant.util.signal_type import SignalType

@ -46,3 +46,4 @@ class HomeAssistantControllerData(TypedDict):
    hass_uuid: str
    client_id: str | None
    refresh_token: str
    app_id: NotRequired[str]
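NotRequired marks app_id as a key that may be absent from the dict, without making its value Optional. A minimal sketch (trimmed to a few keys; the values are made up):

from typing import NotRequired, TypedDict


class ControllerData(TypedDict):
    hass_url: str
    refresh_token: str
    app_id: NotRequired[str]


data: ControllerData = {
    "hass_url": "http://homeassistant.local:8123",
    "refresh_token": "redacted",
}  # valid even though app_id is omitted
print(data.get("app_id", "default-media-receiver"))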
@ -7,6 +7,7 @@ from dataclasses import dataclass
import logging
from typing import TYPE_CHECKING, ClassVar
from urllib.parse import urlparse
from uuid import UUID

import aiohttp
import attr
@ -40,7 +41,7 @@ class ChromecastInfo:
    is_dynamic_group = attr.ib(type=bool | None, default=None)

    @property
    def friendly_name(self) -> str:
    def friendly_name(self) -> str | None:
        """Return the Friendly Name."""
        return self.cast_info.friendly_name

@ -50,7 +51,7 @@ class ChromecastInfo:
        return self.cast_info.cast_type == CAST_TYPE_GROUP

    @property
    def uuid(self) -> bool:
    def uuid(self) -> UUID:
        """Return the UUID."""
        return self.cast_info.uuid

@ -80,7 +81,7 @@ class ChromecastInfo:
            "+label%3A%22integration%3A+cast%22"
        )

        _LOGGER.debug(
        _LOGGER.info(
            (
                "Fetched cast details for unknown model '%s' manufacturer:"
                " '%s', type: '%s'. Please %s"
@ -111,7 +112,10 @@ class ChromecastInfo:
        is_dynamic_group = False
        http_group_status = None
        http_group_status = dial.get_multizone_status(
            None,
            # We pass services which will be used for the HTTP request, and we
            # don't care about the host in http_group_status.dynamic_groups so
            # we pass an empty string to simplify the code.
            "",
            services=self.cast_info.services,
            zconf=ChromeCastZeroconf.get_zeroconf(),
        )
@ -14,7 +14,7 @@
  "documentation": "https://www.home-assistant.io/integrations/cast",
  "iot_class": "local_polling",
  "loggers": ["casttube", "pychromecast"],
  "requirements": ["PyChromecast==14.0.5"],
  "requirements": ["PyChromecast==14.0.7"],
  "single_config_entry": true,
  "zeroconf": ["_googlecast._tcp.local."]
}
@ -7,11 +7,11 @@ show_lovelace_view:
      integration: cast
      domain: media_player
  dashboard_path:
    required: true
    example: lovelace-cast
    selector:
      text:
  view_path:
    required: true
    example: downstairs
    selector:
      text:
@ -6,9 +6,11 @@
  },
  "config": {
    "title": "Google Cast configuration",
    "description": "Known Hosts - A comma-separated list of hostnames or IP-addresses of cast devices, use if mDNS discovery is not working.",
    "data": {
      "known_hosts": "Known hosts"
      "known_hosts": "Add known host"
    },
    "data_description": {
      "known_hosts": "Hostnames or IP-addresses of cast devices, use if mDNS discovery is not working"
    }
  }
},
@ -20,9 +22,11 @@
  "step": {
    "basic_options": {
      "title": "[%key:component::cast::config::step::config::title%]",
      "description": "[%key:component::cast::config::step::config::description%]",
      "data": {
        "known_hosts": "[%key:component::cast::config::step::config::data::known_hosts%]"
      },
      "data_description": {
        "known_hosts": "[%key:component::cast::config::step::config::data_description::known_hosts%]"
      }
    },
    "advanced_options": {
@ -49,7 +53,7 @@
    },
    "dashboard_path": {
      "name": "Dashboard path",
      "description": "The URL path of the dashboard to show."
      "description": "The URL path of the dashboard to show, defaults to lovelace if not specified."
    },
    "view_path": {
      "name": "View path",
@ -44,7 +44,7 @@ class ChaconDioConfigFlow(ConfigFlow, domain=DOMAIN):
            errors["base"] = "cannot_connect"
        except DIOChaconInvalidAuthError:
            errors["base"] = "invalid_auth"
        except Exception:  # pylint: disable=broad-except
        except Exception:
            _LOGGER.exception("Unexpected exception")
            errors["base"] = "unknown"
@ -245,6 +245,10 @@ class CloudLoginView(HomeAssistantView):
    name = "api:cloud:login"

    @require_admin
    async def post(self, request: web.Request) -> web.Response:
        """Handle login request."""
        return await self._post(request)

    @_handle_cloud_errors
    @RequestDataValidator(
        vol.Schema(
@ -259,7 +263,7 @@ class CloudLoginView(HomeAssistantView):
            )
        )
    )
    async def post(self, request: web.Request, data: dict[str, Any]) -> web.Response:
    async def _post(self, request: web.Request, data: dict[str, Any]) -> web.Response:
        """Handle login request."""
        hass = request.app[KEY_HASS]
        cloud = hass.data[DATA_CLOUD]
@ -316,8 +320,12 @@ class CloudLogoutView(HomeAssistantView):
    name = "api:cloud:logout"

    @require_admin
    @_handle_cloud_errors
    async def post(self, request: web.Request) -> web.Response:
        """Handle logout request."""
        return await self._post(request)

    @_handle_cloud_errors
    async def _post(self, request: web.Request) -> web.Response:
        """Handle logout request."""
        hass = request.app[KEY_HASS]
        cloud = hass.data[DATA_CLOUD]
@ -400,9 +408,13 @@ class CloudForgotPasswordView(HomeAssistantView):
    name = "api:cloud:forgot_password"

    @require_admin
    async def post(self, request: web.Request) -> web.Response:
        """Handle forgot password request."""
        return await self._post(request)

    @_handle_cloud_errors
    @RequestDataValidator(vol.Schema({vol.Required("email"): str}))
    async def post(self, request: web.Request, data: dict[str, Any]) -> web.Response:
    async def _post(self, request: web.Request, data: dict[str, Any]) -> web.Response:
        """Handle forgot password request."""
        hass = request.app[KEY_HASS]
        cloud = hass.data[DATA_CLOUD]
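The pattern above splits each view into a public post that enforces the admin check first and a private _post that carries the error-handling and validation decorators, so an unauthenticated caller never reaches those layers. A toy sketch of that delegation with stand-in decorators (not the real aiohttp/Home Assistant ones):

from collections.abc import Callable


def require_admin(func: Callable[..., str]) -> Callable[..., str]:
    def wrapper(self: object, request: dict) -> str:
        if not request.get("is_admin"):
            return "401 Unauthorized"
        return func(self, request)

    return wrapper


def handle_errors(func: Callable[..., str]) -> Callable[..., str]:
    def wrapper(self: object, request: dict) -> str:
        try:
            return func(self, request)
        except ConnectionError as err:
            return f"502 {err}"

    return wrapper


class LoginView:
    @require_admin
    def post(self, request: dict) -> str:
        """The admin check runs before any validation or cloud I/O."""
        return self._post(request)

    @handle_errors
    def _post(self, request: dict) -> str:
        return "200 OK"


print(LoginView().post({"is_admin": False}))  # 401 Unauthorized
print(LoginView().post({"is_admin": True}))  # 200 OK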
@ -286,7 +286,7 @@ class CloudTTSEntity(TextToSpeechEntity):
        return self._language

    @property
    def default_options(self) -> dict[str, Any]:
    def default_options(self) -> dict[str, str]:
        """Return a dict include default options."""
        return {
            ATTR_AUDIO_OUTPUT: AudioOutput.MP3,
@ -363,7 +363,7 @@ class CloudTTSEntity(TextToSpeechEntity):
            _LOGGER.error("Voice error: %s", err)
            return (None, None)

        return (str(options[ATTR_AUDIO_OUTPUT].value), data)
        return (options[ATTR_AUDIO_OUTPUT], data)


class CloudProvider(Provider):
@ -404,7 +404,7 @@ class CloudProvider(Provider):
        return [Voice(voice, voice) for voice in voices]

    @property
    def default_options(self) -> dict[str, Any]:
    def default_options(self) -> dict[str, str]:
        """Return a dict include default options."""
        return {
            ATTR_AUDIO_OUTPUT: AudioOutput.MP3,
@ -444,7 +444,7 @@ class CloudProvider(Provider):
            _LOGGER.error("Voice error: %s", err)
            return (None, None)

        return (str(options[ATTR_AUDIO_OUTPUT].value), data)
        return options[ATTR_AUDIO_OUTPUT], data


@callback
@ -4,19 +4,19 @@
    "step": {
      "user": {
        "title": "Connect to Cloudflare",
        "description": "This integration requires an API Token created with Zone:Zone:Read and Zone:DNS:Edit permissions for all zones in your account.",
        "description": "This integration requires an API token created with Zone:Zone:Read and Zone:DNS:Edit permissions for all zones in your account.",
        "data": {
          "api_token": "[%key:common::config_flow::data::api_token%]"
        }
      },
      "zone": {
        "title": "Choose the Zone to Update",
        "title": "Choose the zone to update",
        "data": {
          "zone": "Zone"
        }
      },
      "records": {
        "title": "Choose the Records to Update",
        "title": "Choose the records to update",
        "data": {
          "records": "Records"
        }
      }
@ -40,7 +40,7 @@
  "services": {
    "update_records": {
      "name": "Update records",
      "description": "Manually trigger update to Cloudflare records."
      "description": "Manually triggers an update of Cloudflare records."
    }
  }
}
@ -20,6 +20,9 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .coordinator import ComelitConfigEntry, ComelitVedoSystem

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0

_LOGGER = logging.getLogger(__name__)

AWAY = "away"
@ -16,6 +16,9 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .coordinator import ComelitConfigEntry, ComelitVedoSystem

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0


async def async_setup_entry(
    hass: HomeAssistant,
@ -21,8 +21,12 @@ from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .coordinator import ComelitConfigEntry, ComelitSerialBridge

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0


class ClimaComelitMode(StrEnum):
    """Serial Bridge clima modes."""
@ -115,13 +119,15 @@ class ComelitClimateEntity(CoordinatorEntity[ComelitSerialBridge], ClimateEntity
        # because no serial number or mac is available
        self._attr_unique_id = f"{config_entry_entry_id}-{device.index}"
        self._attr_device_info = coordinator.platform_device_info(device, device.type)
        self._update_attributes()

    @callback
    def _handle_coordinator_update(self) -> None:
        """Handle updated data from the coordinator."""
    def _update_attributes(self) -> None:
        """Update class attributes."""
        device = self.coordinator.data[CLIMATE][self._device.index]
        if not isinstance(device.val, list):
            raise HomeAssistantError("Invalid clima data")
            raise HomeAssistantError(
                translation_domain=DOMAIN, translation_key="invalid_clima_data"
            )

        # CLIMATE has a 2 item tuple:
        # - first for Clima
@ -152,6 +158,12 @@ class ComelitClimateEntity(CoordinatorEntity[ComelitSerialBridge], ClimateEntity

        self._attr_target_temperature = values[4] / 10

    @callback
    def _handle_coordinator_update(self) -> None:
        """Handle updated data from the coordinator."""
        self._update_attributes()
        super()._handle_coordinator_update()

    async def async_set_temperature(self, **kwargs: Any) -> None:
        """Set new target temperature."""
        if (
@ -165,6 +177,8 @@ class ComelitClimateEntity(CoordinatorEntity[ComelitSerialBridge], ClimateEntity
        await self.coordinator.api.set_clima_status(
            self._device.index, ClimaComelitCommand.SET, target_temp
        )
        self._attr_target_temperature = target_temp
        self.async_write_ha_state()

    async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
        """Set hvac mode."""
@ -176,3 +190,5 @@ class ComelitClimateEntity(CoordinatorEntity[ComelitSerialBridge], ClimateEntity
        await self.coordinator.api.set_clima_status(
            self._device.index, MODE_TO_ACTION[hvac_mode]
        )
        self._attr_hvac_mode = hvac_mode
        self.async_write_ha_state()
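The refactor moves the state parsing into _update_attributes, which runs once in __init__ so the entity starts populated, and again on every coordinator push. A reduced sketch of that shape (the data layout is borrowed from the diff's CLIMATE tuple comment; everything else is a stand-in):

class ClimateEntitySketch:
    def __init__(self, coordinator_values: list[float]) -> None:
        self._values = coordinator_values
        self._update_attributes()  # entity is born with parsed state

    def _update_attributes(self) -> None:
        # values[4] holds the target temperature in tenths of a degree.
        self._attr_target_temperature = self._values[4] / 10

    def _handle_coordinator_update(self) -> None:
        self._update_attributes()
        # the real entity then calls super()._handle_coordinator_update()


entity = ClimateEntitySketch([0, 0, 0, 0, 215])
print(entity._attr_target_temperature)  # 21.5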
@ -9,3 +9,5 @@ _LOGGER = logging.getLogger(__package__)
DOMAIN = "comelit"
DEFAULT_PORT = 80
DEVICE_TYPE_LIST = [BRIDGE, VEDO]

SCAN_INTERVAL = 5
@ -22,7 +22,7 @@ from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import _LOGGER, DOMAIN
from .const import _LOGGER, DOMAIN, SCAN_INTERVAL

type ComelitConfigEntry = ConfigEntry[ComelitBaseCoordinator]

@ -53,7 +53,7 @@ class ComelitBaseCoordinator(DataUpdateCoordinator[T]):
            logger=_LOGGER,
            config_entry=entry,
            name=f"{DOMAIN}-{host}-coordinator",
            update_interval=timedelta(seconds=5),
            update_interval=timedelta(seconds=SCAN_INTERVAL),
        )
        device_registry = dr.async_get(self.hass)
        device_registry.async_get_or_create(
@ -15,6 +15,9 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .coordinator import ComelitConfigEntry, ComelitSerialBridge

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0


async def async_setup_entry(
    hass: HomeAssistant,
@ -24,6 +24,9 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN
from .coordinator import ComelitConfigEntry, ComelitSerialBridge

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0


class HumidifierComelitMode(StrEnum):
    """Serial Bridge humidifier modes."""
@ -121,13 +124,15 @@ class ComelitHumidifierEntity(CoordinatorEntity[ComelitSerialBridge], Humidifier
        self._active_mode = active_mode
        self._active_action = active_action
        self._set_command = set_command
        self._update_attributes()

    @callback
    def _handle_coordinator_update(self) -> None:
        """Handle updated data from the coordinator."""
    def _update_attributes(self) -> None:
        """Update class attributes."""
        device = self.coordinator.data[CLIMATE][self._device.index]
        if not isinstance(device.val, list):
            raise HomeAssistantError("Invalid clima data")
            raise HomeAssistantError(
                translation_domain=DOMAIN, translation_key="invalid_clima_data"
            )

        # CLIMATE has a 2 item tuple:
        # - first for Clima
@ -149,6 +154,12 @@ class ComelitHumidifierEntity(CoordinatorEntity[ComelitSerialBridge], Humidifier
        self._attr_mode = MODE_AUTO if _automatic else MODE_NORMAL
        self._attr_target_humidity = values[4] / 10

    @callback
    def _handle_coordinator_update(self) -> None:
        """Handle updated data from the coordinator."""
        self._update_attributes()
        super()._handle_coordinator_update()

    async def async_set_humidity(self, humidity: int) -> None:
        """Set new target humidity."""
        if self.mode == HumidifierComelitMode.OFF:
@ -163,12 +174,16 @@ class ComelitHumidifierEntity(CoordinatorEntity[ComelitSerialBridge], Humidifier
        await self.coordinator.api.set_humidity_status(
            self._device.index, HumidifierComelitCommand.SET, humidity
        )
        self._attr_target_humidity = humidity
        self.async_write_ha_state()

    async def async_set_mode(self, mode: str) -> None:
        """Set humidifier mode."""
        await self.coordinator.api.set_humidity_status(
            self._device.index, MODE_TO_ACTION[mode]
        )
        self._attr_mode = mode
        self.async_write_ha_state()

    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn on."""
@ -14,6 +14,9 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .coordinator import ComelitConfigEntry, ComelitSerialBridge

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0


async def async_setup_entry(
    hass: HomeAssistant,
@ -7,5 +7,5 @@
  "integration_type": "hub",
  "iot_class": "local_polling",
  "loggers": ["aiocomelit"],
  "requirements": ["aiocomelit==0.11.2"]
  "requirements": ["aiocomelit==0.11.3"]
}
@ -20,6 +20,9 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .coordinator import ComelitConfigEntry, ComelitSerialBridge, ComelitVedoSystem

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0

SENSOR_BRIDGE_TYPES: Final = (
    SensorEntityDescription(
        key="power",
@ -3,19 +3,25 @@
    "flow_title": "{host}",
    "step": {
      "reauth_confirm": {
        "description": "Please enter the correct PIN for {host}",
        "data": {
          "pin": "[%key:common::config_flow::data::pin%]"
        },
        "data_description": {
          "pin": "The PIN of your Comelit device."
        }
      },
      "user": {
        "data": {
          "host": "[%key:common::config_flow::data::host%]",
          "port": "[%key:common::config_flow::data::port%]",
          "pin": "[%key:common::config_flow::data::pin%]"
          "pin": "[%key:common::config_flow::data::pin%]",
          "type": "Device type"
        },
        "data_description": {
          "host": "The hostname or IP address of your Comelit device."
          "host": "The hostname or IP address of your Comelit device.",
          "port": "The port of your Comelit device.",
          "pin": "[%key:component::comelit::config::step::reauth_confirm::data_description::pin%]",
          "type": "The type of your Comelit device."
        }
      }
    },
@ -58,6 +64,9 @@
  "exceptions": {
    "humidity_while_off": {
      "message": "Cannot change humidity while off"
    },
    "invalid_clima_data": {
      "message": "Invalid 'clima' data"
    }
  }
}
@ -14,6 +14,9 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .coordinator import ComelitConfigEntry, ComelitSerialBridge

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0


async def async_setup_entry(
    hass: HomeAssistant,
@ -4,7 +4,6 @@ from __future__ import annotations

from collections.abc import Callable
import logging
import re
from typing import Literal

from hassil.recognize import RecognizeResult
@ -91,8 +90,6 @@ __all__ = [

_LOGGER = logging.getLogger(__name__)

REGEX_TYPE = type(re.compile(""))

SERVICE_PROCESS_SCHEMA = vol.Schema(
    {
        vol.Required(ATTR_TEXT): cv.string,
Some files were not shown because too many files have changed in this diff.