diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index 346f90fbe4f..ce89d8c2b10 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -32,7 +32,7 @@ jobs: fetch-depth: 0 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} @@ -69,7 +69,7 @@ jobs: run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T - - name: Upload translations - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: translations path: translations.tar.gz @@ -116,7 +116,7 @@ jobs: - name: Set up Python ${{ env.DEFAULT_PYTHON }} if: needs.init.outputs.channel == 'dev' - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} @@ -175,7 +175,7 @@ jobs: sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt - name: Download translations - uses: actions/download-artifact@v4.1.9 + uses: actions/download-artifact@v4.2.1 with: name: translations @@ -190,14 +190,14 @@ jobs: echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE - name: Login to GitHub Container Registry - uses: docker/login-action@v3.3.0 + uses: docker/login-action@v3.4.0 with: registry: ghcr.io username: ${{ github.repository_owner }} password: ${{ secrets.GITHUB_TOKEN }} - name: Build base image - uses: home-assistant/builder@2025.02.0 + uses: home-assistant/builder@2025.03.0 with: args: | $BUILD_ARGS \ @@ -256,14 +256,14 @@ jobs: fi - name: Login to GitHub Container Registry - uses: docker/login-action@v3.3.0 + uses: docker/login-action@v3.4.0 with: registry: ghcr.io username: ${{ github.repository_owner }} password: ${{ secrets.GITHUB_TOKEN }} - name: Build base image - uses: home-assistant/builder@2025.02.0 + uses: home-assistant/builder@2025.03.0 with: args: | $BUILD_ARGS \ @@ -330,14 +330,14 @@ jobs: - name: Login to DockerHub if: matrix.registry == 'docker.io/homeassistant' - uses: docker/login-action@v3.3.0 + uses: docker/login-action@v3.4.0 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Login to GitHub Container Registry if: matrix.registry == 'ghcr.io/home-assistant' - uses: docker/login-action@v3.3.0 + uses: docker/login-action@v3.4.0 with: registry: ghcr.io username: ${{ github.repository_owner }} @@ -457,12 +457,12 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} - name: Download translations - uses: actions/download-artifact@v4.1.9 + uses: actions/download-artifact@v4.2.1 with: name: translations @@ -502,7 +502,7 @@ jobs: uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Login to GitHub Container Registry - uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0 + uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 with: registry: ghcr.io username: ${{ github.repository_owner }} diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 3f970ce5874..a843133f1a5 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -37,10 +37,10 @@ on: type: boolean env: - CACHE_VERSION: 11 + CACHE_VERSION: 12 UV_CACHE_VERSION: 1 MYPY_CACHE_VERSION: 9 - HA_SHORT_VERSION: "2025.4" + 
HA_SHORT_VERSION: "2025.5" DEFAULT_PYTHON: "3.13" ALL_PYTHON_VERSIONS: "['3.13']" # 10.3 is the oldest supported version @@ -249,13 +249,13 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache@v4.2.2 + uses: actions/cache@v4.2.3 with: path: venv key: >- @@ -271,7 +271,7 @@ jobs: uv pip install "$(cat requirements_test.txt | grep pre-commit)" - name: Restore pre-commit environment from cache id: cache-precommit - uses: actions/cache@v4.2.2 + uses: actions/cache@v4.2.3 with: path: ${{ env.PRE_COMMIT_CACHE }} lookup-only: true @@ -294,14 +294,14 @@ jobs: - name: Check out code from GitHub uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 id: python with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache/restore@v4.2.2 + uses: actions/cache/restore@v4.2.3 with: path: venv fail-on-cache-miss: true @@ -310,7 +310,7 @@ jobs: needs.info.outputs.pre-commit_cache_key }} - name: Restore pre-commit environment from cache id: cache-precommit - uses: actions/cache/restore@v4.2.2 + uses: actions/cache/restore@v4.2.3 with: path: ${{ env.PRE_COMMIT_CACHE }} fail-on-cache-miss: true @@ -334,14 +334,14 @@ jobs: - name: Check out code from GitHub uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 id: python with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache/restore@v4.2.2 + uses: actions/cache/restore@v4.2.3 with: path: venv fail-on-cache-miss: true @@ -350,7 +350,7 @@ jobs: needs.info.outputs.pre-commit_cache_key }} - name: Restore pre-commit environment from cache id: cache-precommit - uses: actions/cache/restore@v4.2.2 + uses: actions/cache/restore@v4.2.3 with: path: ${{ env.PRE_COMMIT_CACHE }} fail-on-cache-miss: true @@ -374,14 +374,14 @@ jobs: - name: Check out code from GitHub uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 id: python with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache/restore@v4.2.2 + uses: actions/cache/restore@v4.2.3 with: path: venv fail-on-cache-miss: true @@ -390,7 +390,7 @@ jobs: needs.info.outputs.pre-commit_cache_key }} - name: Restore pre-commit environment from cache id: cache-precommit - uses: actions/cache/restore@v4.2.2 + uses: actions/cache/restore@v4.2.3 with: path: ${{ env.PRE_COMMIT_CACHE }} fail-on-cache-miss: true @@ -484,7 +484,7 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ matrix.python-version }} check-latest: true @@ -497,7 +497,7 @@ jobs: env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache@v4.2.2 + uses: actions/cache@v4.2.3 with: path: venv key: >- @@ 
-505,7 +505,7 @@ jobs: needs.info.outputs.python_cache_key }} - name: Restore uv wheel cache if: steps.cache-venv.outputs.cache-hit != 'true' - uses: actions/cache@v4.2.2 + uses: actions/cache@v4.2.3 with: path: ${{ env.UV_CACHE_DIR }} key: >- @@ -552,7 +552,7 @@ jobs: python --version uv pip freeze >> pip_freeze.txt - name: Upload pip_freeze artifact - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: pip-freeze-${{ matrix.python-version }} path: pip_freeze.txt @@ -587,13 +587,13 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.2.2 + uses: actions/cache/restore@v4.2.3 with: path: venv fail-on-cache-miss: true @@ -620,13 +620,13 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache/restore@v4.2.2 + uses: actions/cache/restore@v4.2.3 with: path: venv fail-on-cache-miss: true @@ -677,13 +677,13 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ matrix.python-version }} check-latest: true - name: Restore full Python ${{ matrix.python-version }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.2.2 + uses: actions/cache/restore@v4.2.3 with: path: venv fail-on-cache-miss: true @@ -695,7 +695,7 @@ jobs: . 
venv/bin/activate python -m script.licenses extract --output-file=licenses-${{ matrix.python-version }}.json - name: Upload licenses - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: licenses-${{ github.run_number }}-${{ matrix.python-version }} path: licenses-${{ matrix.python-version }}.json @@ -720,13 +720,13 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.2.2 + uses: actions/cache/restore@v4.2.3 with: path: venv fail-on-cache-miss: true @@ -767,13 +767,13 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.2.2 + uses: actions/cache/restore@v4.2.3 with: path: venv fail-on-cache-miss: true @@ -812,7 +812,7 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -825,7 +825,7 @@ jobs: env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.2.2 + uses: actions/cache/restore@v4.2.3 with: path: venv fail-on-cache-miss: true @@ -833,7 +833,7 @@ jobs: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{ needs.info.outputs.python_cache_key }} - name: Restore mypy cache - uses: actions/cache@v4.2.2 + uses: actions/cache@v4.2.3 with: path: .mypy_cache key: >- @@ -889,13 +889,13 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache/restore@v4.2.2 + uses: actions/cache/restore@v4.2.3 with: path: venv fail-on-cache-miss: true @@ -907,7 +907,7 @@ jobs: . 
venv/bin/activate python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests - name: Upload pytest_buckets - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: pytest_buckets path: pytest_buckets.txt @@ -949,13 +949,13 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ matrix.python-version }} check-latest: true - name: Restore full Python ${{ matrix.python-version }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.2.2 + uses: actions/cache/restore@v4.2.3 with: path: venv fail-on-cache-miss: true @@ -968,7 +968,7 @@ jobs: run: | echo "::add-matcher::.github/workflows/matchers/pytest-slow.json" - name: Download pytest_buckets - uses: actions/download-artifact@v4.1.9 + uses: actions/download-artifact@v4.2.1 with: name: pytest_buckets - name: Compile English translations @@ -1007,21 +1007,21 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-full.conclusion == 'failure' - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }} path: pytest-*.txt overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: coverage-${{ matrix.python-version }}-${{ matrix.group }} path: coverage.xml overwrite: true - name: Upload test results artifact if: needs.info.outputs.skip_coverage != 'true' && !cancelled() - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: test-results-full-${{ matrix.python-version }}-${{ matrix.group }} path: junit.xml @@ -1074,13 +1074,13 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ matrix.python-version }} check-latest: true - name: Restore full Python ${{ matrix.python-version }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.2.2 + uses: actions/cache/restore@v4.2.3 with: path: venv fail-on-cache-miss: true @@ -1138,7 +1138,7 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.mariadb }} @@ -1146,7 +1146,7 @@ jobs: overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: coverage-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.mariadb }} @@ -1154,7 +1154,7 @@ jobs: overwrite: true - name: Upload test results artifact if: needs.info.outputs.skip_coverage != 'true' && !cancelled() - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: test-results-mariadb-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.mariadb }} @@ -1208,13 +1208,13 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ matrix.python-version }} id: python 
- uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ matrix.python-version }} check-latest: true - name: Restore full Python ${{ matrix.python-version }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.2.2 + uses: actions/cache/restore@v4.2.3 with: path: venv fail-on-cache-miss: true @@ -1273,7 +1273,7 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.postgresql }} @@ -1281,7 +1281,7 @@ jobs: overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: coverage-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.postgresql }} @@ -1289,7 +1289,7 @@ jobs: overwrite: true - name: Upload test results artifact if: needs.info.outputs.skip_coverage != 'true' && !cancelled() - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: test-results-postgres-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.postgresql }} @@ -1312,7 +1312,7 @@ jobs: - name: Check out code from GitHub uses: actions/checkout@v4.2.2 - name: Download all coverage artifacts - uses: actions/download-artifact@v4.1.9 + uses: actions/download-artifact@v4.2.1 with: pattern: coverage-* - name: Upload coverage to Codecov @@ -1359,13 +1359,13 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ matrix.python-version }} check-latest: true - name: Restore full Python ${{ matrix.python-version }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.2.2 + uses: actions/cache/restore@v4.2.3 with: path: venv fail-on-cache-miss: true @@ -1420,21 +1420,21 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }} path: pytest-*.txt overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: coverage-${{ matrix.python-version }}-${{ matrix.group }} path: coverage.xml overwrite: true - name: Upload test results artifact if: needs.info.outputs.skip_coverage != 'true' && !cancelled() - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: test-results-partial-${{ matrix.python-version }}-${{ matrix.group }} path: junit.xml @@ -1454,7 +1454,7 @@ jobs: - name: Check out code from GitHub uses: actions/checkout@v4.2.2 - name: Download all coverage artifacts - uses: actions/download-artifact@v4.1.9 + uses: actions/download-artifact@v4.2.1 with: pattern: coverage-* - name: Upload coverage to Codecov @@ -1479,7 +1479,7 @@ jobs: timeout-minutes: 10 steps: - name: Download all coverage artifacts - uses: actions/download-artifact@v4.1.9 + uses: actions/download-artifact@v4.2.1 with: pattern: 
test-results-* - name: Upload test results to Codecov diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 4bdddf50c25..bd072752d16 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -24,11 +24,11 @@ jobs: uses: actions/checkout@v4.2.2 - name: Initialize CodeQL - uses: github/codeql-action/init@v3.28.10 + uses: github/codeql-action/init@v3.28.13 with: languages: python - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3.28.10 + uses: github/codeql-action/analyze@v3.28.13 with: category: "/language:python" diff --git a/.github/workflows/translations.yml b/.github/workflows/translations.yml index 619d83aef51..0b6abe8fe2c 100644 --- a/.github/workflows/translations.yml +++ b/.github/workflows/translations.yml @@ -22,7 +22,7 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index c651ccbe715..d27a62bab80 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -36,7 +36,7 @@ jobs: - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -91,7 +91,7 @@ jobs: ) > build_constraints.txt - name: Upload env_file - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: env_file path: ./.env_file @@ -99,14 +99,14 @@ jobs: overwrite: true - name: Upload build_constraints - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: build_constraints path: ./build_constraints.txt overwrite: true - name: Upload requirements_diff - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: requirements_diff path: ./requirements_diff.txt @@ -118,7 +118,7 @@ jobs: python -m script.gen_requirements_all ci - name: Upload requirements_all_wheels - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: requirements_all_wheels path: ./requirements_all_wheels_*.txt @@ -138,17 +138,17 @@ jobs: uses: actions/checkout@v4.2.2 - name: Download env_file - uses: actions/download-artifact@v4.1.9 + uses: actions/download-artifact@v4.2.1 with: name: env_file - name: Download build_constraints - uses: actions/download-artifact@v4.1.9 + uses: actions/download-artifact@v4.2.1 with: name: build_constraints - name: Download requirements_diff - uses: actions/download-artifact@v4.1.9 + uses: actions/download-artifact@v4.2.1 with: name: requirements_diff @@ -159,7 +159,7 @@ jobs: sed -i "/uv/d" requirements_diff.txt - name: Build wheels - uses: home-assistant/wheels@2025.02.0 + uses: home-assistant/wheels@2025.03.0 with: abi: ${{ matrix.abi }} tag: musllinux_1_2 @@ -187,22 +187,22 @@ jobs: uses: actions/checkout@v4.2.2 - name: Download env_file - uses: actions/download-artifact@v4.1.9 + uses: actions/download-artifact@v4.2.1 with: name: env_file - name: Download build_constraints - uses: actions/download-artifact@v4.1.9 + uses: actions/download-artifact@v4.2.1 with: name: build_constraints - name: Download requirements_diff - uses: actions/download-artifact@v4.1.9 + uses: actions/download-artifact@v4.2.1 with: name: requirements_diff - name: Download requirements_all_wheels - uses: actions/download-artifact@v4.1.9 + uses: 
actions/download-artifact@v4.2.1 with: name: requirements_all_wheels @@ -219,7 +219,7 @@ jobs: sed -i "/uv/d" requirements_diff.txt - name: Build wheels - uses: home-assistant/wheels@2025.02.0 + uses: home-assistant/wheels@2025.03.0 with: abi: ${{ matrix.abi }} tag: musllinux_1_2 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index cf6fe7030e9..42e05a869c3 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.9.10 + rev: v0.11.0 hooks: - id: ruff args: diff --git a/.strict-typing b/.strict-typing index 56d3e299281..e0c4e569f4b 100644 --- a/.strict-typing +++ b/.strict-typing @@ -119,6 +119,7 @@ homeassistant.components.bluetooth_adapters.* homeassistant.components.bluetooth_tracker.* homeassistant.components.bmw_connected_drive.* homeassistant.components.bond.* +homeassistant.components.bosch_alarm.* homeassistant.components.braviatv.* homeassistant.components.bring.* homeassistant.components.brother.* @@ -412,6 +413,7 @@ homeassistant.components.recollect_waste.* homeassistant.components.recorder.* homeassistant.components.remember_the_milk.* homeassistant.components.remote.* +homeassistant.components.remote_calendar.* homeassistant.components.renault.* homeassistant.components.reolink.* homeassistant.components.repairs.* diff --git a/.vscode/tasks.json b/.vscode/tasks.json index b699ed44b96..09c1d374299 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -4,7 +4,7 @@ { "label": "Run Home Assistant Core", "type": "shell", - "command": "hass -c ./config", + "command": "${command:python.interpreterPath} -m homeassistant -c ./config", "group": "test", "presentation": { "reveal": "always", diff --git a/CODEOWNERS b/CODEOWNERS index 4e8f78ca873..9a8d8b2fc64 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -216,6 +216,8 @@ build.json @home-assistant/supervisor /tests/components/bmw_connected_drive/ @gerard33 @rikroe /homeassistant/components/bond/ @bdraco @prystupa @joshs85 @marciogranzotto /tests/components/bond/ @bdraco @prystupa @joshs85 @marciogranzotto +/homeassistant/components/bosch_alarm/ @mag1024 @sanjay900 +/tests/components/bosch_alarm/ @mag1024 @sanjay900 /homeassistant/components/bosch_shc/ @tschamm /tests/components/bosch_shc/ @tschamm /homeassistant/components/braviatv/ @bieniu @Drafteed @@ -570,8 +572,8 @@ build.json @home-assistant/supervisor /tests/components/google_cloud/ @lufton @tronikos /homeassistant/components/google_drive/ @tronikos /tests/components/google_drive/ @tronikos -/homeassistant/components/google_generative_ai_conversation/ @tronikos -/tests/components/google_generative_ai_conversation/ @tronikos +/homeassistant/components/google_generative_ai_conversation/ @tronikos @ivanlh +/tests/components/google_generative_ai_conversation/ @tronikos @ivanlh /homeassistant/components/google_mail/ @tkdrob /tests/components/google_mail/ @tkdrob /homeassistant/components/google_photos/ @allenporter @@ -1183,6 +1185,8 @@ build.json @home-assistant/supervisor /tests/components/prusalink/ @balloob /homeassistant/components/ps4/ @ktnrg45 /tests/components/ps4/ @ktnrg45 +/homeassistant/components/pterodactyl/ @elmurato +/tests/components/pterodactyl/ @elmurato /homeassistant/components/pure_energie/ @klaasnicolaas /tests/components/pure_energie/ @klaasnicolaas /homeassistant/components/purpleair/ @bachya @@ -1252,6 +1256,8 @@ build.json @home-assistant/supervisor /tests/components/refoss/ @ashionky /homeassistant/components/remote/ @home-assistant/core 
/tests/components/remote/ @home-assistant/core +/homeassistant/components/remote_calendar/ @Thomas55555 +/tests/components/remote_calendar/ @Thomas55555 /homeassistant/components/renault/ @epenet /tests/components/renault/ @epenet /homeassistant/components/renson/ @jimmyd-be diff --git a/Dockerfile b/Dockerfile index 3ab0bb37b9a..0a74e0a3aac 100644 --- a/Dockerfile +++ b/Dockerfile @@ -25,13 +25,13 @@ RUN \ "armv7") go2rtc_suffix='arm' ;; \ *) go2rtc_suffix=${BUILD_ARCH} ;; \ esac \ - && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.8/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \ + && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.9/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \ && chmod +x /bin/go2rtc \ # Verify go2rtc can be executed && go2rtc --version # Install uv -RUN pip3 install uv==0.6.1 +RUN pip3 install uv==0.6.10 WORKDIR /usr/src diff --git a/build.yaml b/build.yaml index cd54e410493..87dad1bf5ef 100644 --- a/build.yaml +++ b/build.yaml @@ -19,4 +19,4 @@ labels: org.opencontainers.image.authors: The Home Assistant Authors org.opencontainers.image.url: https://www.home-assistant.io/ org.opencontainers.image.documentation: https://www.home-assistant.io/docs/ - org.opencontainers.image.licenses: Apache License 2.0 + org.opencontainers.image.licenses: Apache-2.0 diff --git a/homeassistant/block_async_io.py b/homeassistant/block_async_io.py index d224b0b151d..eb81268434b 100644 --- a/homeassistant/block_async_io.py +++ b/homeassistant/block_async_io.py @@ -178,6 +178,15 @@ _BLOCKING_CALLS: tuple[BlockingCall, ...] = ( strict_core=False, skip_for_tests=True, ), + BlockingCall( + original_func=SSLContext.set_default_verify_paths, + object=SSLContext, + function="set_default_verify_paths", + check_allowed=None, + strict=False, + strict_core=False, + skip_for_tests=True, + ), BlockingCall( original_func=Path.open, object=Path, diff --git a/homeassistant/bootstrap.py b/homeassistant/bootstrap.py index 734439842b2..962c7871028 100644 --- a/homeassistant/bootstrap.py +++ b/homeassistant/bootstrap.py @@ -93,6 +93,7 @@ from .helpers.dispatcher import async_dispatcher_send_internal from .helpers.storage import get_internal_store_manager from .helpers.system_info import async_get_system_info from .helpers.typing import ConfigType +from .loader import Integration from .setup import ( # _setup_started is marked as protected to make it clear # that it is not part of the public API and should not be used @@ -299,14 +300,6 @@ async def async_setup_hass( return hass - async def stop_hass(hass: core.HomeAssistant) -> None: - """Stop hass.""" - # Ask integrations to shut down. It's messy but we can't - # do a clean stop without knowing what is broken - with contextlib.suppress(TimeoutError): - async with hass.timeout.async_timeout(10): - await hass.async_stop() - hass = await create_hass() if runtime_config.skip_pip or runtime_config.skip_pip_packages: @@ -345,7 +338,7 @@ async def async_setup_hass( if config_dict is None: recovery_mode = True - await stop_hass(hass) + await hass.async_stop(force=True) hass = await create_hass() elif not basic_setup_success: @@ -353,7 +346,7 @@ async def async_setup_hass( "Unable to set up core integrations. 
Activating recovery mode" ) recovery_mode = True - await stop_hass(hass) + await hass.async_stop(force=True) hass = await create_hass() elif any( @@ -368,7 +361,7 @@ async def async_setup_hass( old_logging = hass.data.get(DATA_LOGGING) recovery_mode = True - await stop_hass(hass) + await hass.async_stop(force=True) hass = await create_hass() if old_logging: @@ -719,20 +712,25 @@ def _get_domains(hass: core.HomeAssistant, config: dict[str, Any]) -> set[str]: return domains -async def _async_resolve_domains_to_setup( +async def _async_resolve_domains_and_preload( hass: core.HomeAssistant, config: dict[str, Any] -) -> tuple[set[str], dict[str, loader.Integration]]: - """Resolve all dependencies and return list of domains to set up.""" +) -> tuple[dict[str, Integration], dict[str, Integration]]: + """Resolve all dependencies and return integrations to set up. + + The return value is a tuple of two dictionaries: + - The first dictionary contains integrations + specified by the configuration (including config entries). + - The second dictionary contains the same integrations as the first dictionary + together with all their dependencies. + """ domains_to_setup = _get_domains(hass, config) - needed_requirements: set[str] = set() platform_integrations = conf_util.extract_platform_integrations( config, BASE_PLATFORMS ) - # Ensure base platforms that have platform integrations are added to - # to `domains_to_setup so they can be setup first instead of - # discovering them when later when a config entry setup task - # notices its needed and there is already a long line to use - # the import executor. + # Ensure base platforms that have platform integrations are added to `domains`, + # so they can be setup first instead of discovering them later when a config + # entry setup task notices that it's needed and there is already a long line + # to use the import executor. # # For example if we have # sensor: @@ -748,111 +746,78 @@ async def _async_resolve_domains_to_setup( # so this will be less of a problem in the future. domains_to_setup.update(platform_integrations) - # Load manifests for base platforms and platform based integrations - # that are defined under base platforms right away since we do not require - # the manifest to list them as dependencies and we want to avoid the lock - # contention when multiple integrations try to load them at once - additional_manifests_to_load = { + # Additionally process base platforms since we do not require the manifest + # to list them as dependencies. + # We want to later avoid lock contention when multiple integrations try to load + # their manifests at once. + # Also process integrations that are defined under base platforms + # to speed things up. 
+ additional_domains_to_process = { *BASE_PLATFORMS, *chain.from_iterable(platform_integrations.values()), } - translations_to_load = additional_manifests_to_load.copy() - # Resolve all dependencies so we know all integrations # that will have to be loaded and start right-away - integration_cache: dict[str, loader.Integration] = {} - to_resolve: set[str] = domains_to_setup - while to_resolve or additional_manifests_to_load: - old_to_resolve: set[str] = to_resolve - to_resolve = set() + integrations_or_excs = await loader.async_get_integrations( + hass, {*domains_to_setup, *additional_domains_to_process} + ) + # Eliminate those missing or with invalid manifest + integrations_to_process = { + domain: itg + for domain, itg in integrations_or_excs.items() + if isinstance(itg, Integration) + } + integrations_dependencies = await loader.resolve_integrations_dependencies( + hass, integrations_to_process.values() + ) + # Eliminate those without valid dependencies + integrations_to_process = { + domain: integrations_to_process[domain] for domain in integrations_dependencies + } - if additional_manifests_to_load: - to_get = {*old_to_resolve, *additional_manifests_to_load} - additional_manifests_to_load.clear() - else: - to_get = old_to_resolve + integrations_to_setup = { + domain: itg + for domain, itg in integrations_to_process.items() + if domain in domains_to_setup + } + all_integrations_to_setup = integrations_to_setup.copy() + all_integrations_to_setup.update( + (dep, loader.async_get_loaded_integration(hass, dep)) + for domain in integrations_to_setup + for dep in integrations_dependencies[domain].difference( + all_integrations_to_setup + ) + ) - manifest_deps: set[str] = set() - resolve_dependencies_tasks: list[asyncio.Task[bool]] = [] - integrations_to_process: list[loader.Integration] = [] - - for domain, itg in (await loader.async_get_integrations(hass, to_get)).items(): - if not isinstance(itg, loader.Integration): - continue - integration_cache[domain] = itg - needed_requirements.update(itg.requirements) - - # Make sure manifests for dependencies are loaded in the next - # loop to try to group as many as manifest loads in a single - # call to avoid the creating one-off executor jobs later in - # the setup process - additional_manifests_to_load.update( - dep - for dep in chain(itg.dependencies, itg.after_dependencies) - if dep not in integration_cache - ) - - if domain not in old_to_resolve: - continue - - integrations_to_process.append(itg) - manifest_deps.update(itg.dependencies) - manifest_deps.update(itg.after_dependencies) - if not itg.all_dependencies_resolved: - resolve_dependencies_tasks.append( - create_eager_task( - itg.resolve_dependencies(), - name=f"resolve dependencies {domain}", - loop=hass.loop, - ) - ) - - if unseen_deps := manifest_deps - integration_cache.keys(): - # If there are dependencies, try to preload all - # the integrations manifest at once and add them - # to the list of requirements we need to install - # so we can try to check if they are already installed - # in a single call below which avoids each integration - # having to wait for the lock to do it individually - deps = await loader.async_get_integrations(hass, unseen_deps) - for dependant_domain, dependant_itg in deps.items(): - if isinstance(dependant_itg, loader.Integration): - integration_cache[dependant_domain] = dependant_itg - needed_requirements.update(dependant_itg.requirements) - - if resolve_dependencies_tasks: - await asyncio.gather(*resolve_dependencies_tasks) - - for itg in 
integrations_to_process: - try: - all_deps = itg.all_dependencies - except RuntimeError: - # Integration.all_dependencies raises RuntimeError if - # dependencies could not be resolved - continue - for dep in all_deps: - if dep in domains_to_setup: - continue - domains_to_setup.add(dep) - to_resolve.add(dep) - - _LOGGER.info("Domains to be set up: %s", domains_to_setup) + # Gather requirements for all integrations, + # their dependencies and after dependencies. + # To gather all the requirements we must ignore exceptions here. + # The exceptions will be detected and handled later in the bootstrap process. + integrations_after_dependencies = ( + await loader.resolve_integrations_after_dependencies( + hass, integrations_to_process.values(), ignore_exceptions=True + ) + ) + integrations_requirements = { + domain: itg.requirements for domain, itg in integrations_to_process.items() + } + integrations_requirements.update( + (dep, loader.async_get_loaded_integration(hass, dep).requirements) + for deps in integrations_after_dependencies.values() + for dep in deps.difference(integrations_requirements) + ) + all_requirements = set(chain.from_iterable(integrations_requirements.values())) # Optimistically check if requirements are already installed # ahead of setting up the integrations so we can prime the cache - # We do not wait for this since its an optimization only + # We do not wait for this since it's an optimization only hass.async_create_background_task( - requirements.async_load_installed_versions(hass, needed_requirements), + requirements.async_load_installed_versions(hass, all_requirements), "check installed requirements", eager_start=True, ) - # - # Only add the domains_to_setup after we finish resolving - # as new domains are likely to added in the process - # - translations_to_load.update(domains_to_setup) # Start loading translations for all integrations we are going to set up # in the background so they are ready when we need them. This avoids a # lot of waiting for the translation load lock and a thundering herd of @@ -863,6 +828,7 @@ async def _async_resolve_domains_to_setup( # hold the translation load lock and if anything is fast enough to # wait for the translation load lock, loading will be done by the # time it gets to it. + translations_to_load = {*all_integrations_to_setup, *additional_domains_to_process} hass.async_create_background_task( translation.async_load_integrations(hass, translations_to_load), "load translations", @@ -874,13 +840,13 @@ async def _async_resolve_domains_to_setup( # in the setup process. 
hass.async_create_background_task( get_internal_store_manager(hass).async_preload( - [*PRELOAD_STORAGE, *domains_to_setup] + [*PRELOAD_STORAGE, *all_integrations_to_setup] ), "preload storage", eager_start=True, ) - return domains_to_setup, integration_cache + return integrations_to_setup, all_integrations_to_setup async def _async_set_up_integrations( @@ -890,69 +856,84 @@ async def _async_set_up_integrations( watcher = _WatchPendingSetups(hass, _setup_started(hass)) watcher.async_start() - domains_to_setup, integration_cache = await _async_resolve_domains_to_setup( + integrations, all_integrations = await _async_resolve_domains_and_preload( hass, config ) - stage_2_domains = domains_to_setup.copy() + # Detect all cycles + integrations_after_dependencies = ( + await loader.resolve_integrations_after_dependencies( + hass, all_integrations.values(), set(all_integrations) + ) + ) + all_domains = set(integrations_after_dependencies) + domains = set(integrations) & all_domains + + _LOGGER.info( + "Domains to be set up: %s | %s", + domains, + all_domains - domains, + ) + + async_set_domains_to_be_loaded(hass, all_domains) # Initialize recorder - if "recorder" in domains_to_setup: + if "recorder" in all_domains: recorder.async_initialize_recorder(hass) # Initialize backup - if "backup" in domains_to_setup: + if "backup" in all_domains: backup.async_initialize_backup(hass) - stage_0_and_1_domains: list[tuple[str, set[str], int | None]] = [ + stages: list[tuple[str, set[str], int | None]] = [ *( - (name, domain_group & domains_to_setup, timeout) + (name, domain_group, timeout) for name, domain_group, timeout in STAGE_0_INTEGRATIONS ), - ("stage 1", STAGE_1_INTEGRATIONS & domains_to_setup, STAGE_1_TIMEOUT), + ("1", STAGE_1_INTEGRATIONS, STAGE_1_TIMEOUT), + ("2", domains, STAGE_2_TIMEOUT), ] - _LOGGER.info("Setting up stage 0 and 1") - for name, domain_group, timeout in stage_0_and_1_domains: - if not domain_group: + _LOGGER.info("Setting up stage 0") + for name, domain_group, timeout in stages: + stage_domains_unfiltered = domain_group & all_domains + if not stage_domains_unfiltered: + _LOGGER.info("Nothing to set up in stage %s: %s", name, domain_group) continue - _LOGGER.info("Setting up %s: %s", name, domain_group) - to_be_loaded = domain_group.copy() - to_be_loaded.update( + stage_domains = stage_domains_unfiltered - hass.config.components + if not stage_domains: + _LOGGER.info("Already set up stage %s: %s", name, stage_domains_unfiltered) + continue + + stage_dep_domains_unfiltered = { dep - for domain in domain_group - if (integration := integration_cache.get(domain)) is not None - for dep in integration.all_dependencies + for domain in stage_domains + for dep in integrations_after_dependencies[domain] + if dep not in stage_domains + } + stage_dep_domains = stage_dep_domains_unfiltered - hass.config.components + + stage_all_domains = stage_domains | stage_dep_domains + + _LOGGER.info( + "Setting up stage %s: %s | %s\nDependencies: %s | %s", + name, + stage_domains, + stage_domains_unfiltered - stage_domains, + stage_dep_domains, + stage_dep_domains_unfiltered - stage_dep_domains, ) - async_set_domains_to_be_loaded(hass, to_be_loaded) - stage_2_domains -= to_be_loaded if timeout is None: - await _async_setup_multi_components(hass, domain_group, config) - else: - try: - async with hass.timeout.async_timeout(timeout, cool_down=COOLDOWN_TIME): - await _async_setup_multi_components(hass, domain_group, config) - except TimeoutError: - _LOGGER.warning( - "Setup timed out for %s waiting on %s - 
moving forward", - name, - hass._active_tasks, # noqa: SLF001 - ) - - # Add after dependencies when setting up stage 2 domains - async_set_domains_to_be_loaded(hass, stage_2_domains) - - if stage_2_domains: - _LOGGER.info("Setting up stage 2: %s", stage_2_domains) + await _async_setup_multi_components(hass, stage_all_domains, config) + continue try: - async with hass.timeout.async_timeout( - STAGE_2_TIMEOUT, cool_down=COOLDOWN_TIME - ): - await _async_setup_multi_components(hass, stage_2_domains, config) + async with hass.timeout.async_timeout(timeout, cool_down=COOLDOWN_TIME): + await _async_setup_multi_components(hass, stage_all_domains, config) except TimeoutError: _LOGGER.warning( - "Setup timed out for stage 2 waiting on %s - moving forward", + "Setup timed out for stage %s waiting on %s - moving forward", + name, hass._active_tasks, # noqa: SLF001 ) @@ -1054,8 +1035,6 @@ async def _async_setup_multi_components( config: dict[str, Any], ) -> None: """Set up multiple domains. Log on failure.""" - # Avoid creating tasks for domains that were setup in a previous stage - domains_not_yet_setup = domains - hass.config.components # Create setup tasks for base platforms first since everything will have # to wait to be imported, and the sooner we can get the base platforms # loaded the sooner we can start loading the rest of the integrations. @@ -1065,9 +1044,7 @@ async def _async_setup_multi_components( f"setup component {domain}", eager_start=True, ) - for domain in sorted( - domains_not_yet_setup, key=SETUP_ORDER_SORT_KEY, reverse=True - ) + for domain in sorted(domains, key=SETUP_ORDER_SORT_KEY, reverse=True) } results = await asyncio.gather(*futures.values(), return_exceptions=True) for idx, domain in enumerate(futures): diff --git a/homeassistant/brands/motionblinds.json b/homeassistant/brands/motionblinds.json index 67013e75966..5a48b573b4d 100644 --- a/homeassistant/brands/motionblinds.json +++ b/homeassistant/brands/motionblinds.json @@ -1,5 +1,6 @@ { "domain": "motionblinds", "name": "Motionblinds", - "integrations": ["motion_blinds", "motionblinds_ble"] + "integrations": ["motion_blinds", "motionblinds_ble"], + "iot_standards": ["matter"] } diff --git a/homeassistant/components/accuweather/coordinator.py b/homeassistant/components/accuweather/coordinator.py index 67e3e2ad76e..780c977f930 100644 --- a/homeassistant/components/accuweather/coordinator.py +++ b/homeassistant/components/accuweather/coordinator.py @@ -75,7 +75,11 @@ class AccuWeatherObservationDataUpdateCoordinator( async with timeout(10): result = await self.accuweather.async_get_current_conditions() except EXCEPTIONS as error: - raise UpdateFailed(error) from error + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="current_conditions_update_error", + translation_placeholders={"error": repr(error)}, + ) from error _LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining) @@ -121,7 +125,11 @@ class AccuWeatherDailyForecastDataUpdateCoordinator( language=self.hass.config.language ) except EXCEPTIONS as error: - raise UpdateFailed(error) from error + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="forecast_update_error", + translation_placeholders={"error": repr(error)}, + ) from error _LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining) diff --git a/homeassistant/components/accuweather/strings.json b/homeassistant/components/accuweather/strings.json index d9777352b93..e1a71c5e1a5 100644 --- a/homeassistant/components/accuweather/strings.json 
+++ b/homeassistant/components/accuweather/strings.json @@ -106,6 +106,15 @@ "steady": "Steady", "rising": "Rising", "falling": "Falling" + }, + "state_attributes": { + "options": { + "state": { + "falling": "[%key:component::accuweather::entity::sensor::pressure_tendency::state::falling%]", + "rising": "[%key:component::accuweather::entity::sensor::pressure_tendency::state::rising%]", + "steady": "[%key:component::accuweather::entity::sensor::pressure_tendency::state::steady%]" + } + } } }, "ragweed_pollen": { @@ -220,6 +229,14 @@ } } }, + "exceptions": { + "current_conditions_update_error": { + "message": "An error occurred while retrieving weather current conditions data from the AccuWeather API: {error}" + }, + "forecast_update_error": { + "message": "An error occurred while retrieving weather forecast data from the AccuWeather API: {error}" + } + }, "system_health": { "info": { "can_reach_server": "Reach AccuWeather server", diff --git a/homeassistant/components/adax/strings.json b/homeassistant/components/adax/strings.json index 6157b7dfc91..9ba497a9aca 100644 --- a/homeassistant/components/adax/strings.json +++ b/homeassistant/components/adax/strings.json @@ -5,14 +5,14 @@ "data": { "connection_type": "Select connection type" }, - "description": "Select connection type. Local requires heaters with bluetooth" + "description": "Select connection type. Local requires heaters with Bluetooth" }, "local": { "data": { "wifi_ssid": "Wi-Fi SSID", - "wifi_pswd": "Wi-Fi Password" + "wifi_pswd": "Wi-Fi password" }, - "description": "Reset the heater by pressing + and OK until display shows 'Reset'. Then press and hold OK button on the heater until the blue led starts blinking before pressing Submit. Configuring heater might take some minutes." + "description": "Reset the heater by pressing + and OK until display shows 'Reset'. Then press and hold OK button on the heater until the blue LED starts blinking before pressing Submit. Configuring heater might take some minutes." }, "cloud": { "data": { diff --git a/homeassistant/components/airgradient/strings.json b/homeassistant/components/airgradient/strings.json index 4cf3a6a34ea..2d9b6be529d 100644 --- a/homeassistant/components/airgradient/strings.json +++ b/homeassistant/components/airgradient/strings.json @@ -11,7 +11,7 @@ } }, "discovery_confirm": { - "description": "Do you want to setup {model}?" + "description": "Do you want to set up {model}?" 
} }, "abort": { diff --git a/homeassistant/components/airly/coordinator.py b/homeassistant/components/airly/coordinator.py index b255c5f078f..668cabdae63 100644 --- a/homeassistant/components/airly/coordinator.py +++ b/homeassistant/components/airly/coordinator.py @@ -105,7 +105,14 @@ class AirlyDataUpdateCoordinator(DataUpdateCoordinator[dict[str, str | float | i try: await measurements.update() except (AirlyError, ClientConnectorError) as error: - raise UpdateFailed(error) from error + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error", + translation_placeholders={ + "entry": self.config_entry.title, + "error": repr(error), + }, + ) from error _LOGGER.debug( "Requests remaining: %s/%s", @@ -126,7 +133,11 @@ class AirlyDataUpdateCoordinator(DataUpdateCoordinator[dict[str, str | float | i standards = measurements.current["standards"] if index["description"] == NO_AIRLY_SENSORS: - raise UpdateFailed("Can't retrieve data: no Airly sensors in this area") + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="no_station", + translation_placeholders={"entry": self.config_entry.title}, + ) for value in values: data[value["name"]] = value["value"] for standard in standards: diff --git a/homeassistant/components/airly/strings.json b/homeassistant/components/airly/strings.json index 33ee8bbe4c9..fe4ccbb4745 100644 --- a/homeassistant/components/airly/strings.json +++ b/homeassistant/components/airly/strings.json @@ -36,5 +36,13 @@ "name": "[%key:component::sensor::entity_component::carbon_monoxide::name%]" } } + }, + "exceptions": { + "update_error": { + "message": "An error occurred while retrieving data from the Airly API for {entry}: {error}" + }, + "no_station": { + "message": "An error occurred while retrieving data from the Airly API for {entry}: no measuring stations in this area" + } } } diff --git a/homeassistant/components/airnow/strings.json b/homeassistant/components/airnow/strings.json index d5fb22106f9..a69f67948cb 100644 --- a/homeassistant/components/airnow/strings.json +++ b/homeassistant/components/airnow/strings.json @@ -7,7 +7,7 @@ "api_key": "[%key:common::config_flow::data::api_key%]", "latitude": "[%key:common::config_flow::data::latitude%]", "longitude": "[%key:common::config_flow::data::longitude%]", - "radius": "Station Radius (miles; optional)" + "radius": "Station radius (miles; optional)" } } }, @@ -25,7 +25,7 @@ "step": { "init": { "data": { - "radius": "Station Radius (miles)" + "radius": "Station radius (miles)" } } } diff --git a/homeassistant/components/airq/strings.json b/homeassistant/components/airq/strings.json index 26b944467e6..9c16975a3ab 100644 --- a/homeassistant/components/airq/strings.json +++ b/homeassistant/components/airq/strings.json @@ -91,7 +91,7 @@ "name": "Hydrogen fluoride" }, "health_index": { - "name": "Health Index" + "name": "Health index" }, "absolute_humidity": { "name": "Absolute humidity" @@ -112,10 +112,10 @@ "name": "Oxygen" }, "performance_index": { - "name": "Performance Index" + "name": "Performance index" }, "hydrogen_phosphide": { - "name": "Hydrogen Phosphide" + "name": "Hydrogen phosphide" }, "relative_pressure": { "name": "Relative pressure" @@ -127,22 +127,22 @@ "name": "Refrigerant" }, "silicon_hydride": { - "name": "Silicon Hydride" + "name": "Silicon hydride" }, "noise": { "name": "Noise" }, "maximum_noise": { - "name": "Noise (Maximum)" + "name": "Noise (maximum)" }, "radon": { "name": "Radon" }, "industrial_volatile_organic_compounds": { - "name": "VOCs (Industrial)" + "name": 
"VOCs (industrial)" }, "virus_index": { - "name": "Virus Index" + "name": "Virus index" } } } diff --git a/homeassistant/components/airthings_ble/config_flow.py b/homeassistant/components/airthings_ble/config_flow.py index 3e7b659bff1..2d32fa6e7df 100644 --- a/homeassistant/components/airthings_ble/config_flow.py +++ b/homeassistant/components/airthings_ble/config_flow.py @@ -102,7 +102,8 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN): device = await self._get_device_data(discovery_info) except AirthingsDeviceUpdateError: return self.async_abort(reason="cannot_connect") - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unknown error occurred") return self.async_abort(reason="unknown") name = get_name(device) @@ -160,7 +161,8 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN): device = await self._get_device_data(discovery_info) except AirthingsDeviceUpdateError: return self.async_abort(reason="cannot_connect") - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unknown error occurred") return self.async_abort(reason="unknown") name = get_name(device) self._discovered_devices[address] = Discovery(name, discovery_info, device) diff --git a/homeassistant/components/airtouch5/config_flow.py b/homeassistant/components/airtouch5/config_flow.py index d96aaed96b7..38c85e45fb8 100644 --- a/homeassistant/components/airtouch5/config_flow.py +++ b/homeassistant/components/airtouch5/config_flow.py @@ -32,7 +32,8 @@ class AirTouch5ConfigFlow(ConfigFlow, domain=DOMAIN): client = Airtouch5SimpleClient(user_input[CONF_HOST]) try: await client.test_connection() - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors = {"base": "cannot_connect"} else: await self.async_set_unique_id(user_input[CONF_HOST]) diff --git a/homeassistant/components/airzone_cloud/manifest.json b/homeassistant/components/airzone_cloud/manifest.json index 0e21e57ec52..3b6f94df57c 100644 --- a/homeassistant/components/airzone_cloud/manifest.json +++ b/homeassistant/components/airzone_cloud/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/airzone_cloud", "iot_class": "cloud_push", "loggers": ["aioairzone_cloud"], - "requirements": ["aioairzone-cloud==0.6.10"] + "requirements": ["aioairzone-cloud==0.6.11"] } diff --git a/homeassistant/components/alexa/capabilities.py b/homeassistant/components/alexa/capabilities.py index e70055c20b1..897037987a7 100644 --- a/homeassistant/components/alexa/capabilities.py +++ b/homeassistant/components/alexa/capabilities.py @@ -1438,7 +1438,7 @@ class AlexaModeController(AlexaCapability): # Fan preset_mode if self.instance == f"{fan.DOMAIN}.{fan.ATTR_PRESET_MODE}": mode = self.entity.attributes.get(fan.ATTR_PRESET_MODE, None) - if mode in self.entity.attributes.get(fan.ATTR_PRESET_MODES, None): + if mode in self.entity.attributes.get(fan.ATTR_PRESET_MODES, ()): return f"{fan.ATTR_PRESET_MODE}.{mode}" # Humidifier mode diff --git a/homeassistant/components/ambient_network/sensor.py b/homeassistant/components/ambient_network/sensor.py index 9ec6db6ff45..b96da9863a1 100644 --- a/homeassistant/components/ambient_network/sensor.py +++ b/homeassistant/components/ambient_network/sensor.py @@ -240,6 +240,7 @@ SENSOR_DESCRIPTIONS = ( suggested_display_precision=0, entity_registry_enabled_default=False, device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, ), SensorEntityDescription( key=TYPE_WINDGUSTMPH, diff --git 
a/homeassistant/components/ambient_station/sensor.py b/homeassistant/components/ambient_station/sensor.py index 730b798bd15..1b4334774d4 100644 --- a/homeassistant/components/ambient_station/sensor.py +++ b/homeassistant/components/ambient_station/sensor.py @@ -609,6 +609,7 @@ SENSOR_DESCRIPTIONS = ( translation_key="wind_direction", native_unit_of_measurement=DEGREE, device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, ), SensorEntityDescription( key=TYPE_WINDDIR_AVG10M, diff --git a/homeassistant/components/androidtv_remote/manifest.json b/homeassistant/components/androidtv_remote/manifest.json index 1c45e825359..89cc0fc3965 100644 --- a/homeassistant/components/androidtv_remote/manifest.json +++ b/homeassistant/components/androidtv_remote/manifest.json @@ -7,6 +7,6 @@ "integration_type": "device", "iot_class": "local_push", "loggers": ["androidtvremote2"], - "requirements": ["androidtvremote2==0.2.0"], + "requirements": ["androidtvremote2==0.2.1"], "zeroconf": ["_androidtvremote2._tcp.local."] } diff --git a/homeassistant/components/anova/config_flow.py b/homeassistant/components/anova/config_flow.py index bc4723b1dba..f382606baba 100644 --- a/homeassistant/components/anova/config_flow.py +++ b/homeassistant/components/anova/config_flow.py @@ -2,6 +2,8 @@ from __future__ import annotations +import logging + from anova_wifi import AnovaApi, InvalidLogin import voluptuous as vol @@ -11,8 +13,10 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN +_LOGGER = logging.getLogger(__name__) -class AnovaConfligFlow(ConfigFlow, domain=DOMAIN): + +class AnovaConfigFlow(ConfigFlow, domain=DOMAIN): """Sets up a config flow for Anova.""" VERSION = 1 @@ -35,7 +39,8 @@ class AnovaConfligFlow(ConfigFlow, domain=DOMAIN): await api.authenticate() except InvalidLogin: errors["base"] = "invalid_auth" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: return self.async_create_entry( diff --git a/homeassistant/components/anthemav/media_player.py b/homeassistant/components/anthemav/media_player.py index cfbd3c29547..317498e96b5 100644 --- a/homeassistant/components/anthemav/media_player.py +++ b/homeassistant/components/anthemav/media_player.py @@ -22,6 +22,7 @@ from . 
import AnthemavConfigEntry from .const import ANTHEMAV_UPDATE_SIGNAL, DOMAIN, MANUFACTURER _LOGGER = logging.getLogger(__name__) +VOLUME_STEP = 0.01 async def async_setup_entry( @@ -60,6 +61,7 @@ class AnthemAVR(MediaPlayerEntity): | MediaPlayerEntityFeature.TURN_OFF | MediaPlayerEntityFeature.SELECT_SOURCE ) + _attr_volume_step = VOLUME_STEP def __init__( self, diff --git a/homeassistant/components/anthropic/config_flow.py b/homeassistant/components/anthropic/config_flow.py index 5f1f4fdeea7..e53a479d7d4 100644 --- a/homeassistant/components/anthropic/config_flow.py +++ b/homeassistant/components/anthropic/config_flow.py @@ -34,10 +34,12 @@ from .const import ( CONF_PROMPT, CONF_RECOMMENDED, CONF_TEMPERATURE, + CONF_THINKING_BUDGET, DOMAIN, RECOMMENDED_CHAT_MODEL, RECOMMENDED_MAX_TOKENS, RECOMMENDED_TEMPERATURE, + RECOMMENDED_THINKING_BUDGET, ) _LOGGER = logging.getLogger(__name__) @@ -128,21 +130,29 @@ class AnthropicOptionsFlow(OptionsFlow): ) -> ConfigFlowResult: """Manage the options.""" options: dict[str, Any] | MappingProxyType[str, Any] = self.config_entry.options + errors: dict[str, str] = {} if user_input is not None: if user_input[CONF_RECOMMENDED] == self.last_rendered_recommended: if user_input[CONF_LLM_HASS_API] == "none": user_input.pop(CONF_LLM_HASS_API) - return self.async_create_entry(title="", data=user_input) - # Re-render the options again, now with the recommended options shown/hidden - self.last_rendered_recommended = user_input[CONF_RECOMMENDED] + if user_input.get( + CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET + ) >= user_input.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS): + errors[CONF_THINKING_BUDGET] = "thinking_budget_too_large" - options = { - CONF_RECOMMENDED: user_input[CONF_RECOMMENDED], - CONF_PROMPT: user_input[CONF_PROMPT], - CONF_LLM_HASS_API: user_input[CONF_LLM_HASS_API], - } + if not errors: + return self.async_create_entry(title="", data=user_input) + else: + # Re-render the options again, now with the recommended options shown/hidden + self.last_rendered_recommended = user_input[CONF_RECOMMENDED] + + options = { + CONF_RECOMMENDED: user_input[CONF_RECOMMENDED], + CONF_PROMPT: user_input[CONF_PROMPT], + CONF_LLM_HASS_API: user_input[CONF_LLM_HASS_API], + } suggested_values = options.copy() if not suggested_values.get(CONF_PROMPT): @@ -156,6 +166,7 @@ class AnthropicOptionsFlow(OptionsFlow): return self.async_show_form( step_id="init", data_schema=schema, + errors=errors or None, ) @@ -205,6 +216,10 @@ def anthropic_config_option_schema( CONF_TEMPERATURE, default=RECOMMENDED_TEMPERATURE, ): NumberSelector(NumberSelectorConfig(min=0, max=1, step=0.05)), + vol.Optional( + CONF_THINKING_BUDGET, + default=RECOMMENDED_THINKING_BUDGET, + ): int, } ) return schema diff --git a/homeassistant/components/anthropic/const.py b/homeassistant/components/anthropic/const.py index 0dbf9c51ac1..38e4270e6e1 100644 --- a/homeassistant/components/anthropic/const.py +++ b/homeassistant/components/anthropic/const.py @@ -13,3 +13,8 @@ CONF_MAX_TOKENS = "max_tokens" RECOMMENDED_MAX_TOKENS = 1024 CONF_TEMPERATURE = "temperature" RECOMMENDED_TEMPERATURE = 1.0 +CONF_THINKING_BUDGET = "thinking_budget" +RECOMMENDED_THINKING_BUDGET = 0 +MIN_THINKING_BUDGET = 1024 + +THINKING_MODELS = ["claude-3-7-sonnet-20250219", "claude-3-7-sonnet-latest"] diff --git a/homeassistant/components/anthropic/conversation.py b/homeassistant/components/anthropic/conversation.py index 8d3ba5085ee..5e5ad464eaa 100644 --- a/homeassistant/components/anthropic/conversation.py +++ 
b/homeassistant/components/anthropic/conversation.py @@ -1,23 +1,32 @@ """Conversation support for Anthropic.""" -from collections.abc import AsyncGenerator, Callable +from collections.abc import AsyncGenerator, Callable, Iterable import json -from typing import Any, Literal +from typing import Any, Literal, cast import anthropic from anthropic import AsyncStream from anthropic._types import NOT_GIVEN from anthropic.types import ( InputJSONDelta, - Message, MessageParam, MessageStreamEvent, RawContentBlockDeltaEvent, RawContentBlockStartEvent, RawContentBlockStopEvent, + RawMessageStartEvent, + RawMessageStopEvent, + RedactedThinkingBlock, + RedactedThinkingBlockParam, + SignatureDelta, TextBlock, TextBlockParam, TextDelta, + ThinkingBlock, + ThinkingBlockParam, + ThinkingConfigDisabledParam, + ThinkingConfigEnabledParam, + ThinkingDelta, ToolParam, ToolResultBlockParam, ToolUseBlock, @@ -30,7 +39,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import chat_session, device_registry as dr, intent, llm +from homeassistant.helpers import device_registry as dr, intent, llm from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from . import AnthropicConfigEntry @@ -39,11 +48,15 @@ from .const import ( CONF_MAX_TOKENS, CONF_PROMPT, CONF_TEMPERATURE, + CONF_THINKING_BUDGET, DOMAIN, LOGGER, + MIN_THINKING_BUDGET, RECOMMENDED_CHAT_MODEL, RECOMMENDED_MAX_TOKENS, RECOMMENDED_TEMPERATURE, + RECOMMENDED_THINKING_BUDGET, + THINKING_MODELS, ) # Max number of back and forth with the LLM to generate a response @@ -71,73 +84,101 @@ def _format_tool( ) -def _message_convert( - message: Message, -) -> MessageParam: - """Convert from class to TypedDict.""" - param_content: list[TextBlockParam | ToolUseBlockParam] = [] +def _convert_content( + chat_content: Iterable[conversation.Content], +) -> list[MessageParam]: + """Transform HA chat_log content into Anthropic API format.""" + messages: list[MessageParam] = [] - for message_content in message.content: - if isinstance(message_content, TextBlock): - param_content.append(TextBlockParam(type="text", text=message_content.text)) - elif isinstance(message_content, ToolUseBlock): - param_content.append( - ToolUseBlockParam( - type="tool_use", - id=message_content.id, - name=message_content.name, - input=message_content.input, - ) + for content in chat_content: + if isinstance(content, conversation.ToolResultContent): + tool_result_block = ToolResultBlockParam( + type="tool_result", + tool_use_id=content.tool_call_id, + content=json.dumps(content.tool_result), ) - - return MessageParam(role=message.role, content=param_content) - - -def _convert_content(chat_content: conversation.Content) -> MessageParam: - """Create tool response content.""" - if isinstance(chat_content, conversation.ToolResultContent): - return MessageParam( - role="user", - content=[ - ToolResultBlockParam( - type="tool_result", - tool_use_id=chat_content.tool_call_id, - content=json.dumps(chat_content.tool_result), - ) - ], - ) - if isinstance(chat_content, conversation.AssistantContent): - return MessageParam( - role="assistant", - content=[ - TextBlockParam(type="text", text=chat_content.content or ""), - *[ - ToolUseBlockParam( - type="tool_use", - id=tool_call.id, - name=tool_call.tool_name, - input=tool_call.tool_args, + if not messages or messages[-1]["role"] != "user": 
+ messages.append( + MessageParam( + role="user", + content=[tool_result_block], ) - for tool_call in chat_content.tool_calls or () - ], - ], - ) - if isinstance(chat_content, conversation.UserContent): - return MessageParam( - role="user", - content=chat_content.content, - ) - # Note: We don't pass SystemContent here as its passed to the API as the prompt - raise ValueError(f"Unexpected content type: {type(chat_content)}") + ) + elif isinstance(messages[-1]["content"], str): + messages[-1]["content"] = [ + TextBlockParam(type="text", text=messages[-1]["content"]), + tool_result_block, + ] + else: + messages[-1]["content"].append(tool_result_block) # type: ignore[attr-defined] + elif isinstance(content, conversation.UserContent): + # Combine consequent user messages + if not messages or messages[-1]["role"] != "user": + messages.append( + MessageParam( + role="user", + content=content.content, + ) + ) + elif isinstance(messages[-1]["content"], str): + messages[-1]["content"] = [ + TextBlockParam(type="text", text=messages[-1]["content"]), + TextBlockParam(type="text", text=content.content), + ] + else: + messages[-1]["content"].append( # type: ignore[attr-defined] + TextBlockParam(type="text", text=content.content) + ) + elif isinstance(content, conversation.AssistantContent): + # Combine consequent assistant messages + if not messages or messages[-1]["role"] != "assistant": + messages.append( + MessageParam( + role="assistant", + content=[], + ) + ) + + if content.content: + messages[-1]["content"].append( # type: ignore[union-attr] + TextBlockParam(type="text", text=content.content) + ) + if content.tool_calls: + messages[-1]["content"].extend( # type: ignore[union-attr] + [ + ToolUseBlockParam( + type="tool_use", + id=tool_call.id, + name=tool_call.tool_name, + input=tool_call.tool_args, + ) + for tool_call in content.tool_calls + ] + ) + else: + # Note: We don't pass SystemContent here as its passed to the API as the prompt + raise TypeError(f"Unexpected content type: {type(content)}") + + return messages async def _transform_stream( result: AsyncStream[MessageStreamEvent], + messages: list[MessageParam], ) -> AsyncGenerator[conversation.AssistantContentDeltaDict]: """Transform the response stream into HA format. A typical stream of responses might look something like the following: - RawMessageStartEvent with no content + - RawContentBlockStartEvent with an empty ThinkingBlock (if extended thinking is enabled) + - RawContentBlockDeltaEvent with a ThinkingDelta + - RawContentBlockDeltaEvent with a ThinkingDelta + - RawContentBlockDeltaEvent with a ThinkingDelta + - ... + - RawContentBlockDeltaEvent with a SignatureDelta + - RawContentBlockStopEvent + - RawContentBlockStartEvent with a RedactedThinkingBlock (occasionally) + - RawContentBlockStopEvent (RedactedThinkingBlock does not have a delta) - RawContentBlockStartEvent with an empty TextBlock - RawContentBlockDeltaEvent with a TextDelta - RawContentBlockDeltaEvent with a TextDelta @@ -151,44 +192,103 @@ async def _transform_stream( - RawContentBlockStopEvent - RawMessageDeltaEvent with a stop_reason='tool_use' - RawMessageStopEvent(type='message_stop') + + Each message could contain multiple blocks of the same type. 
""" if result is None: raise TypeError("Expected a stream of messages") - current_tool_call: dict | None = None + current_message: MessageParam | None = None + current_block: ( + TextBlockParam + | ToolUseBlockParam + | ThinkingBlockParam + | RedactedThinkingBlockParam + | None + ) = None + current_tool_args: str async for response in result: LOGGER.debug("Received response: %s", response) - if isinstance(response, RawContentBlockStartEvent): + if isinstance(response, RawMessageStartEvent): + if response.message.role != "assistant": + raise ValueError("Unexpected message role") + current_message = MessageParam(role=response.message.role, content=[]) + elif isinstance(response, RawContentBlockStartEvent): if isinstance(response.content_block, ToolUseBlock): - current_tool_call = { - "id": response.content_block.id, - "name": response.content_block.name, - "input": "", - } + current_block = ToolUseBlockParam( + type="tool_use", + id=response.content_block.id, + name=response.content_block.name, + input="", + ) + current_tool_args = "" elif isinstance(response.content_block, TextBlock): + current_block = TextBlockParam( + type="text", text=response.content_block.text + ) yield {"role": "assistant"} + if response.content_block.text: + yield {"content": response.content_block.text} + elif isinstance(response.content_block, ThinkingBlock): + current_block = ThinkingBlockParam( + type="thinking", + thinking=response.content_block.thinking, + signature=response.content_block.signature, + ) + elif isinstance(response.content_block, RedactedThinkingBlock): + current_block = RedactedThinkingBlockParam( + type="redacted_thinking", data=response.content_block.data + ) + LOGGER.debug( + "Some of Claude’s internal reasoning has been automatically " + "encrypted for safety reasons. 
This doesn’t affect the quality of " + "responses" + ) elif isinstance(response, RawContentBlockDeltaEvent): + if current_block is None: + raise ValueError("Unexpected delta without a block") if isinstance(response.delta, InputJSONDelta): - if current_tool_call is None: - raise ValueError("Unexpected delta without a tool call") - current_tool_call["input"] += response.delta.partial_json + current_tool_args += response.delta.partial_json elif isinstance(response.delta, TextDelta): - LOGGER.debug("yielding delta: %s", response.delta.text) + text_block = cast(TextBlockParam, current_block) + text_block["text"] += response.delta.text yield {"content": response.delta.text} + elif isinstance(response.delta, ThinkingDelta): + thinking_block = cast(ThinkingBlockParam, current_block) + thinking_block["thinking"] += response.delta.thinking + elif isinstance(response.delta, SignatureDelta): + thinking_block = cast(ThinkingBlockParam, current_block) + thinking_block["signature"] += response.delta.signature elif isinstance(response, RawContentBlockStopEvent): - if current_tool_call: + if current_block is None: + raise ValueError("Unexpected stop event without a current block") + if current_block["type"] == "tool_use": + tool_block = cast(ToolUseBlockParam, current_block) + tool_args = json.loads(current_tool_args) + tool_block["input"] = tool_args yield { "tool_calls": [ llm.ToolInput( - id=current_tool_call["id"], - tool_name=current_tool_call["name"], - tool_args=json.loads(current_tool_call["input"]), + id=tool_block["id"], + tool_name=tool_block["name"], + tool_args=tool_args, ) ] } - current_tool_call = None + elif current_block["type"] == "thinking": + thinking_block = cast(ThinkingBlockParam, current_block) + LOGGER.debug("Thinking: %s", thinking_block["thinking"]) + + if current_message is None: + raise ValueError("Unexpected stop event without a current message") + current_message["content"].append(current_block) # type: ignore[union-attr] + current_block = None + elif isinstance(response, RawMessageStopEvent): + if current_message is not None: + messages.append(current_message) + current_message = None class AnthropicConversationEntity( @@ -226,18 +326,6 @@ class AnthropicConversationEntity( self.entry.add_update_listener(self._async_entry_update_listener) ) - async def async_process( - self, user_input: conversation.ConversationInput - ) -> conversation.ConversationResult: - """Process a sentence.""" - with ( - chat_session.async_get_chat_session( - self.hass, user_input.conversation_id - ) as session, - conversation.async_get_chat_log(self.hass, session, user_input) as chat_log, - ): - return await self._async_handle_message(user_input, chat_log) - async def _async_handle_message( self, user_input: conversation.ConversationInput, @@ -266,34 +354,50 @@ class AnthropicConversationEntity( system = chat_log.content[0] if not isinstance(system, conversation.SystemContent): raise TypeError("First message must be a system message") - messages = [_convert_content(content) for content in chat_log.content[1:]] + messages = _convert_content(chat_log.content[1:]) client = self.entry.runtime_data + thinking_budget = options.get(CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET) + model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL) + # To prevent infinite loops, we limit the number of iterations for _iteration in range(MAX_TOOL_ITERATIONS): - try: - stream = await client.messages.create( - model=options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL), - messages=messages, - tools=tools or 
NOT_GIVEN, - max_tokens=options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS), - system=system.content, - temperature=options.get(CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE), - stream=True, + model_args = { + "model": model, + "messages": messages, + "tools": tools or NOT_GIVEN, + "max_tokens": options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS), + "system": system.content, + "stream": True, + } + if model in THINKING_MODELS and thinking_budget >= MIN_THINKING_BUDGET: + model_args["thinking"] = ThinkingConfigEnabledParam( + type="enabled", budget_tokens=thinking_budget ) + else: + model_args["thinking"] = ThinkingConfigDisabledParam(type="disabled") + model_args["temperature"] = options.get( + CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE + ) + + try: + stream = await client.messages.create(**model_args) except anthropic.AnthropicError as err: raise HomeAssistantError( f"Sorry, I had a problem talking to Anthropic: {err}" ) from err messages.extend( - [ - _convert_content(content) - async for content in chat_log.async_add_delta_content_stream( - user_input.agent_id, _transform_stream(stream) - ) - ] + _convert_content( + [ + content + async for content in chat_log.async_add_delta_content_stream( + user_input.agent_id, _transform_stream(stream, messages) + ) + if not isinstance(content, conversation.AssistantContent) + ] + ) ) if not chat_log.unresponded_tool_results: diff --git a/homeassistant/components/anthropic/strings.json b/homeassistant/components/anthropic/strings.json index 9550a1a6672..c2caf3a6666 100644 --- a/homeassistant/components/anthropic/strings.json +++ b/homeassistant/components/anthropic/strings.json @@ -23,12 +23,17 @@ "max_tokens": "Maximum tokens to return in response", "temperature": "Temperature", "llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]", - "recommended": "Recommended model settings" + "recommended": "Recommended model settings", + "thinking_budget_tokens": "Thinking budget" }, "data_description": { - "prompt": "Instruct how the LLM should respond. This can be a template." + "prompt": "Instruct how the LLM should respond. This can be a template.", + "thinking_budget_tokens": "The number of tokens the model can use to think about the response out of the total maximum number of tokens. Set to 1024 or greater to enable extended thinking." } } + }, + "error": { + "thinking_budget_too_large": "Maximum tokens must be greater than the thinking budget." 
} } } diff --git a/homeassistant/components/apcupsd/strings.json b/homeassistant/components/apcupsd/strings.json index 93102ac1393..fb5df9ec390 100644 --- a/homeassistant/components/apcupsd/strings.json +++ b/homeassistant/components/apcupsd/strings.json @@ -57,7 +57,7 @@ "name": "Status date" }, "dip_switch_settings": { - "name": "Dip switch settings" + "name": "DIP switch settings" }, "low_battery_signal": { "name": "Low battery signal" diff --git a/homeassistant/components/aprilaire/manifest.json b/homeassistant/components/aprilaire/manifest.json index 577de8ae88d..b40460dd61b 100644 --- a/homeassistant/components/aprilaire/manifest.json +++ b/homeassistant/components/aprilaire/manifest.json @@ -7,5 +7,5 @@ "integration_type": "device", "iot_class": "local_push", "loggers": ["pyaprilaire"], - "requirements": ["pyaprilaire==0.7.7"] + "requirements": ["pyaprilaire==0.8.1"] } diff --git a/homeassistant/components/aquacell/config_flow.py b/homeassistant/components/aquacell/config_flow.py index 1ee89035d93..277cb742486 100644 --- a/homeassistant/components/aquacell/config_flow.py +++ b/homeassistant/components/aquacell/config_flow.py @@ -60,7 +60,7 @@ class AquaCellConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" except AuthenticationFailed: errors["base"] = "invalid_auth" - except Exception: # pylint: disable=broad-except + except Exception: _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: diff --git a/homeassistant/components/arwn/sensor.py b/homeassistant/components/arwn/sensor.py index a31156bbba6..4cc4feed2d4 100644 --- a/homeassistant/components/arwn/sensor.py +++ b/homeassistant/components/arwn/sensor.py @@ -6,7 +6,11 @@ import logging from typing import Any from homeassistant.components import mqtt -from homeassistant.components.sensor import SensorDeviceClass, SensorEntity +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorStateClass, +) from homeassistant.const import DEGREE, UnitOfPrecipitationDepth, UnitOfTemperature from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -98,6 +102,7 @@ def discover_sensors(topic: str, payload: dict[str, Any]) -> list[ArwnSensor] | DEGREE, "mdi:compass", device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, ), ] return None @@ -178,6 +183,7 @@ class ArwnSensor(SensorEntity): units: str, icon: str | None = None, device_class: SensorDeviceClass | None = None, + state_class: SensorStateClass | None = None, ) -> None: """Initialize the sensor.""" self.entity_id = _slug(name) @@ -188,6 +194,7 @@ class ArwnSensor(SensorEntity): self._attr_native_unit_of_measurement = units self._attr_icon = icon self._attr_device_class = device_class + self._attr_state_class = state_class def set_event(self, event: dict[str, Any]) -> None: """Update the sensor with the most recent event.""" diff --git a/homeassistant/components/assist_pipeline/pipeline.py b/homeassistant/components/assist_pipeline/pipeline.py index a028fa638df..a205db4e615 100644 --- a/homeassistant/components/assist_pipeline/pipeline.py +++ b/homeassistant/components/assist_pipeline/pipeline.py @@ -125,7 +125,7 @@ SAVE_DELAY = 10 @callback def _async_local_fallback_intent_filter(result: RecognizeResult) -> bool: """Filter out intents that are not local fallback.""" - return result.intent.name in (intent.INTENT_GET_STATE, intent.INTENT_NEVERMIND) + return result.intent.name in 
(intent.INTENT_GET_STATE) @callback @@ -649,6 +649,7 @@ class PipelineRun: data["runner_data"] = self.runner_data if self.tts_stream: data["tts_output"] = { + "token": self.tts_stream.token, "url": self.tts_stream.url, "mime_type": self.tts_stream.content_type, } @@ -1295,6 +1296,7 @@ class PipelineRun: tts_output = { "media_id": tts_media_id, + "token": self.tts_stream.token, "url": self.tts_stream.url, "mime_type": self.tts_stream.content_type, } diff --git a/homeassistant/components/assist_satellite/__init__.py b/homeassistant/components/assist_satellite/__init__.py index 038ff517264..31afbda1d11 100644 --- a/homeassistant/components/assist_satellite/__init__.py +++ b/homeassistant/components/assist_satellite/__init__.py @@ -56,6 +56,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: { vol.Optional("message"): str, vol.Optional("media_id"): str, + vol.Optional("preannounce_media_id"): str, } ), cv.has_at_least_one_key("message", "media_id"), @@ -70,6 +71,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: { vol.Optional("start_message"): str, vol.Optional("start_media_id"): str, + vol.Optional("preannounce_media_id"): str, vol.Optional("extra_system_prompt"): str, } ), diff --git a/homeassistant/components/assist_satellite/entity.py b/homeassistant/components/assist_satellite/entity.py index 8c63525294c..450e6cadbc9 100644 --- a/homeassistant/components/assist_satellite/entity.py +++ b/homeassistant/components/assist_satellite/entity.py @@ -23,9 +23,6 @@ from homeassistant.components.assist_pipeline import ( vad, ) from homeassistant.components.media_player import async_process_play_media_url -from homeassistant.components.tts import ( - generate_media_source_id as tts_generate_media_source_id, -) from homeassistant.core import Context, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import chat_session, entity @@ -98,9 +95,15 @@ class AssistSatelliteAnnouncement: original_media_id: str """The raw media ID before processing.""" + tts_token: str | None + """The TTS token of the media.""" + media_id_source: Literal["url", "media_id", "tts"] """Source of the media ID.""" + preannounce_media_id: str | None = None + """Media ID to be played before announcement.""" + class AssistSatelliteEntity(entity.Entity): """Entity encapsulating the state and functionality of an Assist satellite.""" @@ -177,6 +180,7 @@ class AssistSatelliteEntity(entity.Entity): self, message: str | None = None, media_id: str | None = None, + preannounce_media_id: str | None = None, ) -> None: """Play and show an announcement on the satellite. @@ -186,6 +190,8 @@ class AssistSatelliteEntity(entity.Entity): If media_id is provided, it is played directly. It is possible to omit the message and the satellite will not show any text. + If preannounce_media_id is provided, it is played before the announcement. + Calls async_announce with message and media id. 
""" await self._cancel_running_pipeline() @@ -193,7 +199,9 @@ class AssistSatelliteEntity(entity.Entity): if message is None: message = "" - announcement = await self._resolve_announcement_media_id(message, media_id) + announcement = await self._resolve_announcement_media_id( + message, media_id, preannounce_media_id + ) if self._is_announcing: raise SatelliteBusyError @@ -220,6 +228,7 @@ class AssistSatelliteEntity(entity.Entity): start_message: str | None = None, start_media_id: str | None = None, extra_system_prompt: str | None = None, + preannounce_media_id: str | None = None, ) -> None: """Start a conversation from the satellite. @@ -229,6 +238,8 @@ class AssistSatelliteEntity(entity.Entity): If start_media_id is provided, it is played directly. It is possible to omit the message and the satellite will not show any text. + If preannounce_media_id is provided, it is played before the announcement. + Calls async_start_conversation. """ await self._cancel_running_pipeline() @@ -244,13 +255,15 @@ class AssistSatelliteEntity(entity.Entity): start_message = "" announcement = await self._resolve_announcement_media_id( - start_message, start_media_id + start_message, start_media_id, preannounce_media_id ) if self._is_announcing: raise SatelliteBusyError self._is_announcing = True + self._set_state(AssistSatelliteState.RESPONDING) + # Provide our start info to the LLM so it understands context of incoming message if extra_system_prompt is not None: self._extra_system_prompt = extra_system_prompt @@ -280,6 +293,7 @@ class AssistSatelliteEntity(entity.Entity): raise finally: self._is_announcing = False + self._set_state(AssistSatelliteState.IDLE) async def async_start_conversation( self, start_announcement: AssistSatelliteAnnouncement @@ -470,20 +484,27 @@ class AssistSatelliteEntity(entity.Entity): return vad.VadSensitivity.to_seconds(vad_sensitivity) async def _resolve_announcement_media_id( - self, message: str, media_id: str | None + self, + message: str, + media_id: str | None, + preannounce_media_id: str | None = None, ) -> AssistSatelliteAnnouncement: """Resolve the media ID.""" media_id_source: Literal["url", "media_id", "tts"] | None = None + tts_token: str | None = None if media_id: original_media_id = media_id - else: media_id_source = "tts" # Synthesize audio and get URL pipeline_id = self._resolve_pipeline() pipeline = async_get_pipeline(self.hass, pipeline_id) + engine = tts.async_resolve_engine(self.hass, pipeline.tts_engine) + if engine is None: + raise HomeAssistantError(f"TTS engine {pipeline.tts_engine} not found") + tts_options: dict[str, Any] = {} if pipeline.tts_voice is not None: tts_options[tts.ATTR_VOICE] = pipeline.tts_voice @@ -491,14 +512,23 @@ class AssistSatelliteEntity(entity.Entity): if self.tts_options is not None: tts_options.update(self.tts_options) - media_id = tts_generate_media_source_id( + stream = tts.async_create_stream( self.hass, - message, - engine=pipeline.tts_engine, + engine=engine, + language=pipeline.tts_language, + options=tts_options, + ) + stream.async_set_message(message) + + tts_token = stream.token + media_id = stream.url + original_media_id = tts.generate_media_source_id( + self.hass, + message, + engine=engine, language=pipeline.tts_language, options=tts_options, ) - original_media_id = media_id if media_source.is_media_source_id(media_id): if not media_id_source: @@ -516,6 +546,26 @@ class AssistSatelliteEntity(entity.Entity): # Resolve to full URL media_id = async_process_play_media_url(self.hass, media_id) + # Resolve preannounce media 
id + if preannounce_media_id: + if media_source.is_media_source_id(preannounce_media_id): + preannounce_media = await media_source.async_resolve_media( + self.hass, + preannounce_media_id, + None, + ) + preannounce_media_id = preannounce_media.url + + # Resolve to full URL + preannounce_media_id = async_process_play_media_url( + self.hass, preannounce_media_id + ) + return AssistSatelliteAnnouncement( - message, media_id, original_media_id, media_id_source + message=message, + media_id=media_id, + original_media_id=original_media_id, + tts_token=tts_token, + media_id_source=media_id_source, + preannounce_media_id=preannounce_media_id, ) diff --git a/homeassistant/components/assist_satellite/services.yaml b/homeassistant/components/assist_satellite/services.yaml index 89a20ada6f3..fd6a4f23ccc 100644 --- a/homeassistant/components/assist_satellite/services.yaml +++ b/homeassistant/components/assist_satellite/services.yaml @@ -14,6 +14,10 @@ announce: required: false selector: text: + preannounce_media_id: + required: false + selector: + text: start_conversation: target: entity: @@ -34,3 +38,7 @@ start_conversation: required: false selector: text: + preannounce_media_id: + required: false + selector: + text: diff --git a/homeassistant/components/assist_satellite/strings.json b/homeassistant/components/assist_satellite/strings.json index fa2dc984ab7..2bb61516bca 100644 --- a/homeassistant/components/assist_satellite/strings.json +++ b/homeassistant/components/assist_satellite/strings.json @@ -23,6 +23,10 @@ "media_id": { "name": "Media ID", "description": "The media ID to announce instead of using text-to-speech." + }, + "preannounce_media_id": { + "name": "Preannounce Media ID", + "description": "The media ID to play before the announcement." } } }, @@ -41,6 +45,10 @@ "extra_system_prompt": { "name": "Extra system prompt", "description": "Provide background information to the AI about the request." + }, + "preannounce_media_id": { + "name": "Preannounce Media ID", + "description": "The media ID to play before the start message or media." } } } diff --git a/homeassistant/components/asuswrt/strings.json b/homeassistant/components/asuswrt/strings.json index 9d50f50c7e9..cac37c0cfd0 100644 --- a/homeassistant/components/asuswrt/strings.json +++ b/homeassistant/components/asuswrt/strings.json @@ -66,28 +66,28 @@ "name": "Upload" }, "load_avg_1m": { - "name": "Average load (1m)" + "name": "Average load (1 min)" }, "load_avg_5m": { - "name": "Average load (5m)" + "name": "Average load (5 min)" }, "load_avg_15m": { - "name": "Average load (15m)" + "name": "Average load (15 min)" }, "24ghz_temperature": { - "name": "2.4GHz Temperature" + "name": "2.4GHz temperature" }, "5ghz_temperature": { - "name": "5GHz Temperature" + "name": "5GHz temperature" }, "cpu_temperature": { - "name": "CPU Temperature" + "name": "CPU temperature" }, "5ghz_2_temperature": { - "name": "5GHz Temperature (Radio 2)" + "name": "5GHz temperature (Radio 2)" }, "6ghz_temperature": { - "name": "6GHz Temperature" + "name": "6GHz temperature" }, "cpu_usage": { "name": "CPU usage" diff --git a/homeassistant/components/azure_devops/strings.json b/homeassistant/components/azure_devops/strings.json index f5fe5cd06a7..611a8b9a758 100644 --- a/homeassistant/components/azure_devops/strings.json +++ b/homeassistant/components/azure_devops/strings.json @@ -14,7 +14,7 @@ "personal_access_token": "Personal Access Token (PAT)" }, "description": "Set up an Azure DevOps instance to access your project. 
A Personal Access Token is only required for a private project.", - "title": "Add Azure DevOps Project" + "title": "Add Azure DevOps project" }, "reauth_confirm": { "data": { @@ -32,7 +32,7 @@ "entity": { "sensor": { "build_id": { - "name": "{definition_name} latest build id" + "name": "{definition_name} latest build ID" }, "finish_time": { "name": "{definition_name} latest build finish time" @@ -59,7 +59,7 @@ "name": "{definition_name} latest build start time" }, "url": { - "name": "{definition_name} latest build url" + "name": "{definition_name} latest build URL" }, "work_item_count": { "name": "{item_type} {item_state} work items" @@ -68,7 +68,7 @@ }, "exceptions": { "authentication_failed": { - "message": "Could not authorize with Azure DevOps for {title}. You will need to update your personal access token." + "message": "Could not authorize with Azure DevOps for {title}. You will need to update your Personal Access Token." } } } diff --git a/homeassistant/components/backup/__init__.py b/homeassistant/components/backup/__init__.py index d9d1c3cc2fe..124ce8b872c 100644 --- a/homeassistant/components/backup/__init__.py +++ b/homeassistant/components/backup/__init__.py @@ -1,7 +1,9 @@ """The Backup integration.""" +from homeassistant.config_entries import SOURCE_SYSTEM +from homeassistant.const import Platform from homeassistant.core import HomeAssistant, ServiceCall -from homeassistant.helpers import config_validation as cv +from homeassistant.helpers import config_validation as cv, discovery_flow from homeassistant.helpers.backup import DATA_BACKUP from homeassistant.helpers.hassio import is_hassio from homeassistant.helpers.typing import ConfigType @@ -18,10 +20,12 @@ from .agent import ( ) from .config import BackupConfig, CreateBackupParametersDict from .const import DATA_MANAGER, DOMAIN +from .coordinator import BackupConfigEntry, BackupDataUpdateCoordinator from .http import async_register_http_views from .manager import ( BackupManager, BackupManagerError, + BackupPlatformEvent, BackupPlatformProtocol, BackupReaderWriter, BackupReaderWriterError, @@ -52,6 +56,7 @@ __all__ = [ "BackupConfig", "BackupManagerError", "BackupNotFound", + "BackupPlatformEvent", "BackupPlatformProtocol", "BackupReaderWriter", "BackupReaderWriterError", @@ -74,6 +79,8 @@ __all__ = [ "suggested_filename_from_name_date", ] +PLATFORMS = [Platform.SENSOR] + CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) @@ -128,4 +135,28 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async_register_http_views(hass) + discovery_flow.async_create_flow( + hass, DOMAIN, context={"source": SOURCE_SYSTEM}, data={} + ) + return True + + +async def async_setup_entry(hass: HomeAssistant, entry: BackupConfigEntry) -> bool: + """Set up a config entry.""" + backup_manager: BackupManager = hass.data[DATA_MANAGER] + coordinator = BackupDataUpdateCoordinator(hass, entry, backup_manager) + await coordinator.async_config_entry_first_refresh() + + entry.async_on_unload(coordinator.async_unsubscribe) + + entry.runtime_data = coordinator + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: BackupConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/backup/config_flow.py b/homeassistant/components/backup/config_flow.py new file mode 100644 index 00000000000..ab1f884ea86 --- /dev/null +++ 
b/homeassistant/components/backup/config_flow.py @@ -0,0 +1,21 @@ +"""Config flow for Home Assistant Backup integration.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult + +from .const import DOMAIN + + +class BackupConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Home Assistant Backup.""" + + VERSION = 1 + + async def async_step_system( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + return self.async_create_entry(title="Backup", data={}) diff --git a/homeassistant/components/backup/const.py b/homeassistant/components/backup/const.py index c2070a37b2d..773deaef174 100644 --- a/homeassistant/components/backup/const.py +++ b/homeassistant/components/backup/const.py @@ -16,8 +16,8 @@ DATA_MANAGER: HassKey[BackupManager] = HassKey(DOMAIN) LOGGER = getLogger(__package__) EXCLUDE_FROM_BACKUP = [ - "__pycache__/*", - ".DS_Store", + "**/__pycache__/*", + "**/.DS_Store", ".HA_RESTORE", "*.db-shm", "*.log.*", diff --git a/homeassistant/components/backup/coordinator.py b/homeassistant/components/backup/coordinator.py new file mode 100644 index 00000000000..377f23567e0 --- /dev/null +++ b/homeassistant/components/backup/coordinator.py @@ -0,0 +1,81 @@ +"""Coordinator for Home Assistant Backup integration.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from datetime import datetime + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.backup import ( + async_subscribe_events, + async_subscribe_platform_events, +) +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator + +from .const import DOMAIN, LOGGER +from .manager import ( + BackupManager, + BackupManagerState, + BackupPlatformEvent, + ManagerStateEvent, +) + +type BackupConfigEntry = ConfigEntry[BackupDataUpdateCoordinator] + + +@dataclass +class BackupCoordinatorData: + """Class to hold backup data.""" + + backup_manager_state: BackupManagerState + last_successful_automatic_backup: datetime | None + next_scheduled_automatic_backup: datetime | None + + +class BackupDataUpdateCoordinator(DataUpdateCoordinator[BackupCoordinatorData]): + """Class to retrieve backup status.""" + + config_entry: ConfigEntry + + def __init__( + self, + hass: HomeAssistant, + config_entry: ConfigEntry, + backup_manager: BackupManager, + ) -> None: + """Initialize coordinator.""" + super().__init__( + hass, + LOGGER, + config_entry=config_entry, + name=DOMAIN, + update_interval=None, + ) + self.unsubscribe: list[Callable[[], None]] = [ + async_subscribe_events(hass, self._on_event), + async_subscribe_platform_events(hass, self._on_event), + ] + + self.backup_manager = backup_manager + + @callback + def _on_event(self, event: ManagerStateEvent | BackupPlatformEvent) -> None: + """Handle new event.""" + LOGGER.debug("Received backup event: %s", event) + self.config_entry.async_create_task(self.hass, self.async_refresh()) + + async def _async_update_data(self) -> BackupCoordinatorData: + """Update backup manager data.""" + return BackupCoordinatorData( + self.backup_manager.state, + self.backup_manager.config.data.last_completed_automatic_backup, + self.backup_manager.config.data.schedule.next_automatic_backup, + ) + + @callback + def async_unsubscribe(self) -> None: + """Unsubscribe from events.""" + for unsub in self.unsubscribe: 
+ unsub() diff --git a/homeassistant/components/backup/diagnostics.py b/homeassistant/components/backup/diagnostics.py new file mode 100644 index 00000000000..9c3e28bde5b --- /dev/null +++ b/homeassistant/components/backup/diagnostics.py @@ -0,0 +1,27 @@ +"""Diagnostics support for Home Assistant Backup integration.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.const import CONF_PASSWORD +from homeassistant.core import HomeAssistant + +from .coordinator import BackupConfigEntry + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: BackupConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + coordinator = entry.runtime_data + return { + "backup_agents": [ + {"name": agent.name, "agent_id": agent.agent_id} + for agent in coordinator.backup_manager.backup_agents.values() + ], + "backup_config": async_redact_data( + coordinator.backup_manager.config.data.to_dict(), [CONF_PASSWORD] + ), + } diff --git a/homeassistant/components/backup/entity.py b/homeassistant/components/backup/entity.py new file mode 100644 index 00000000000..ff7c7889dc5 --- /dev/null +++ b/homeassistant/components/backup/entity.py @@ -0,0 +1,36 @@ +"""Base for backup entities.""" + +from __future__ import annotations + +from homeassistant.const import __version__ as HA_VERSION +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import BackupDataUpdateCoordinator + + +class BackupManagerEntity(CoordinatorEntity[BackupDataUpdateCoordinator]): + """Base entity for backup manager.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: BackupDataUpdateCoordinator, + entity_description: EntityDescription, + ) -> None: + """Initialize base entity.""" + super().__init__(coordinator) + self.entity_description = entity_description + self._attr_unique_id = entity_description.key + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, "backup_manager")}, + manufacturer="Home Assistant", + model="Home Assistant Backup", + sw_version=HA_VERSION, + name="Backup", + entry_type=DeviceEntryType.SERVICE, + configuration_url="homeassistant://config/backup", + ) diff --git a/homeassistant/components/backup/manager.py b/homeassistant/components/backup/manager.py index bfaa5c5a48e..43a7be6db8d 100644 --- a/homeassistant/components/backup/manager.py +++ b/homeassistant/components/backup/manager.py @@ -120,6 +120,7 @@ class BackupManagerState(StrEnum): IDLE = "idle" CREATE_BACKUP = "create_backup" + BLOCKED = "blocked" RECEIVE_BACKUP = "receive_backup" RESTORE_BACKUP = "restore_backup" @@ -228,6 +229,20 @@ class RestoreBackupEvent(ManagerStateEvent): state: RestoreBackupState +@dataclass(frozen=True, kw_only=True, slots=True) +class BackupPlatformEvent: + """Backup platform class.""" + + domain: str + + +@dataclass(frozen=True, kw_only=True, slots=True) +class BlockedEvent(ManagerStateEvent): + """Backup manager blocked, Home Assistant is starting.""" + + manager_state: BackupManagerState = BackupManagerState.BLOCKED + + class BackupPlatformProtocol(Protocol): """Define the format that backup platforms can have.""" @@ -342,11 +357,14 @@ class BackupManager: self.remove_next_delete_event: Callable[[], None] | None = None # Latest backup event and backup 
event subscribers - self.last_event: ManagerStateEvent = IdleEvent() - self.last_non_idle_event: ManagerStateEvent | None = None + self.last_event: ManagerStateEvent = BlockedEvent() + self.last_action_event: ManagerStateEvent | None = None self._backup_event_subscriptions = hass.data[ DATA_BACKUP ].backup_event_subscriptions + self._backup_platform_event_subscriptions = hass.data[ + DATA_BACKUP + ].backup_platform_event_subscriptions async def async_setup(self) -> None: """Set up the backup manager.""" @@ -356,10 +374,19 @@ class BackupManager: self.known_backups.load(stored["backups"]) await self._reader_writer.async_validate_config(config=self.config) + await self._reader_writer.async_resume_restore_progress_after_restart( on_progress=self.async_on_backup_event ) + async def set_manager_idle_after_start(hass: HomeAssistant) -> None: + """Set manager to idle after start.""" + self.async_on_backup_event(IdleEvent()) + + if self.state == BackupManagerState.BLOCKED: + # If we're not finishing a restore job, set the manager to idle after start + start.async_at_started(self.hass, set_manager_idle_after_start) + await self.load_platforms() @property @@ -448,6 +475,9 @@ class BackupManager: LOGGER.debug("%s platforms loaded in total", len(self.platforms)) LOGGER.debug("%s agents loaded in total", len(self.backup_agents)) LOGGER.debug("%s local agents loaded in total", len(self.local_backup_agents)) + event = BackupPlatformEvent(domain=integration_domain) + for subscription in self._backup_platform_event_subscriptions: + subscription(event) async def async_pre_backup_actions(self) -> None: """Perform pre backup actions.""" @@ -1319,8 +1349,8 @@ class BackupManager: if (current_state := self.state) != (new_state := event.manager_state): LOGGER.debug("Backup state: %s -> %s", current_state, new_state) self.last_event = event - if not isinstance(event, IdleEvent): - self.last_non_idle_event = event + if not isinstance(event, (BlockedEvent, IdleEvent)): + self.last_action_event = event for subscription in self._backup_event_subscriptions: subscription(event) @@ -1696,7 +1726,9 @@ class CoreBackupReaderWriter(BackupReaderWriter): """Filter to filter excludes.""" for exclude in excludes: - if not path.match(exclude): + # The home assistant core configuration directory is added as "data" + # in the tar file, so we need to prefix that path to the filters. 
+ if not path.full_match(f"data/{exclude}"): continue LOGGER.debug("Ignoring %s because of %s", path, exclude) return True diff --git a/homeassistant/components/backup/manifest.json b/homeassistant/components/backup/manifest.json index db0719983b1..3c7b1e5e014 100644 --- a/homeassistant/components/backup/manifest.json +++ b/homeassistant/components/backup/manifest.json @@ -5,8 +5,9 @@ "codeowners": ["@home-assistant/core"], "dependencies": ["http", "websocket_api"], "documentation": "https://www.home-assistant.io/integrations/backup", - "integration_type": "system", + "integration_type": "service", "iot_class": "calculated", "quality_scale": "internal", - "requirements": ["cronsim==2.6", "securetar==2025.2.1"] + "requirements": ["cronsim==2.6", "securetar==2025.2.1"], + "single_config_entry": true } diff --git a/homeassistant/components/backup/sensor.py b/homeassistant/components/backup/sensor.py new file mode 100644 index 00000000000..59e98ae7c2d --- /dev/null +++ b/homeassistant/components/backup/sensor.py @@ -0,0 +1,75 @@ +"""Sensor platform for Home Assistant Backup integration.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from datetime import datetime + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from .coordinator import BackupConfigEntry, BackupCoordinatorData +from .entity import BackupManagerEntity +from .manager import BackupManagerState + + +@dataclass(kw_only=True, frozen=True) +class BackupSensorEntityDescription(SensorEntityDescription): + """Description for Home Assistant Backup sensor entities.""" + + value_fn: Callable[[BackupCoordinatorData], str | datetime | None] + + +BACKUP_MANAGER_DESCRIPTIONS = ( + BackupSensorEntityDescription( + key="backup_manager_state", + translation_key="backup_manager_state", + device_class=SensorDeviceClass.ENUM, + options=[state.value for state in BackupManagerState], + value_fn=lambda data: data.backup_manager_state, + ), + BackupSensorEntityDescription( + key="next_scheduled_automatic_backup", + translation_key="next_scheduled_automatic_backup", + device_class=SensorDeviceClass.TIMESTAMP, + value_fn=lambda data: data.next_scheduled_automatic_backup, + ), + BackupSensorEntityDescription( + key="last_successful_automatic_backup", + translation_key="last_successful_automatic_backup", + device_class=SensorDeviceClass.TIMESTAMP, + value_fn=lambda data: data.last_successful_automatic_backup, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: BackupConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Sensor set up for backup config entry.""" + + coordinator = config_entry.runtime_data + + async_add_entities( + BackupManagerSensor(coordinator, description) + for description in BACKUP_MANAGER_DESCRIPTIONS + ) + + +class BackupManagerSensor(BackupManagerEntity, SensorEntity): + """Sensor to track backup manager state.""" + + entity_description: BackupSensorEntityDescription + + @property + def native_value(self) -> str | datetime | None: + """Return native value of entity.""" + return self.entity_description.value_fn(self.coordinator.data) diff --git a/homeassistant/components/backup/strings.json b/homeassistant/components/backup/strings.json index c3047d3a4ac..487fdd89a7c 100644 --- 
a/homeassistant/components/backup/strings.json +++ b/homeassistant/components/backup/strings.json @@ -22,5 +22,24 @@ "name": "Create automatic backup", "description": "Creates a new backup with automatic backup settings." } + }, + "entity": { + "sensor": { + "backup_manager_state": { + "name": "Backup Manager State", + "state": { + "idle": "Idle", + "create_backup": "Creating a backup", + "receive_backup": "Receiving a backup", + "restore_backup": "Restoring a backup" + } + }, + "next_scheduled_automatic_backup": { + "name": "Next scheduled automatic backup" + }, + "last_successful_automatic_backup": { + "name": "Last successful automatic backup" + } + } } } diff --git a/homeassistant/components/backup/websocket.py b/homeassistant/components/backup/websocket.py index 8b5f35287dd..4c370a4224d 100644 --- a/homeassistant/components/backup/websocket.py +++ b/homeassistant/components/backup/websocket.py @@ -55,7 +55,7 @@ async def handle_info( "backups": list(backups.values()), "last_attempted_automatic_backup": manager.config.data.last_attempted_automatic_backup, "last_completed_automatic_backup": manager.config.data.last_completed_automatic_backup, - "last_non_idle_event": manager.last_non_idle_event, + "last_action_event": manager.last_action_event, "next_automatic_backup": manager.config.data.schedule.next_automatic_backup, "next_automatic_backup_additional": manager.config.data.schedule.next_automatic_backup_additional, "state": manager.state, diff --git a/homeassistant/components/baf/strings.json b/homeassistant/components/baf/strings.json index e2f02a6095e..64956984bb8 100644 --- a/homeassistant/components/baf/strings.json +++ b/homeassistant/components/baf/strings.json @@ -23,7 +23,7 @@ "entity": { "climate": { "auto_comfort": { - "name": "Auto comfort" + "name": "Auto Comfort" } }, "fan": { @@ -39,25 +39,25 @@ }, "number": { "comfort_min_speed": { - "name": "Auto Comfort Minimum Speed" + "name": "Auto Comfort minimum speed" }, "comfort_max_speed": { - "name": "Auto Comfort Maximum Speed" + "name": "Auto Comfort maximum speed" }, "comfort_heat_assist_speed": { - "name": "Auto Comfort Heat Assist Speed" + "name": "Auto Comfort Heat Assist speed" }, "return_to_auto_timeout": { - "name": "Return to Auto Timeout" + "name": "Return to Auto timeout" }, "motion_sense_timeout": { - "name": "Motion Sense Timeout" + "name": "Motion sense timeout" }, "light_return_to_auto_timeout": { - "name": "Light Return to Auto Timeout" + "name": "Light return to Auto timeout" }, "light_auto_motion_timeout": { - "name": "Light Motion Sense Timeout" + "name": "Light motion sense timeout" } }, "sensor": { @@ -76,10 +76,10 @@ }, "switch": { "legacy_ir_remote_enable": { - "name": "Legacy IR Remote" + "name": "Legacy IR remote" }, "led_indicators_enable": { - "name": "Led Indicators" + "name": "LED indicators" }, "comfort_heat_assist_enable": { "name": "Auto Comfort Heat Assist" @@ -88,10 +88,10 @@ "name": "Beep" }, "eco_enable": { - "name": "Eco Mode" + "name": "Eco mode" }, "motion_sense_enable": { - "name": "Motion Sense" + "name": "Motion sense" }, "return_to_auto_enable": { "name": "Return to Auto" @@ -103,7 +103,7 @@ "name": "Dim to Warm" }, "light_return_to_auto_enable": { - "name": "Light Return to Auto" + "name": "Light return to Auto" } } } diff --git a/homeassistant/components/bang_olufsen/strings.json b/homeassistant/components/bang_olufsen/strings.json index 278e9b6d47c..422dc4be567 100644 --- a/homeassistant/components/bang_olufsen/strings.json +++ b/homeassistant/components/bang_olufsen/strings.json 
@@ -274,7 +274,7 @@ "message": "An error occurred while attempting to play {media_type}: {error_message}." }, "invalid_grouping_entity": { - "message": "Entity with id: {entity_id} can't be added to the Beolink session. Is the entity a Bang & Olufsen media_player?" + "message": "Entity with ID {entity_id} can't be added to the Beolink session. Is the entity a Bang & Olufsen media_player?" }, "invalid_sound_mode": { "message": "{invalid_sound_mode} is an invalid sound mode. Valid values are: {valid_sound_modes}." diff --git a/homeassistant/components/bluesound/config_flow.py b/homeassistant/components/bluesound/config_flow.py index 2f002b70e1d..cfb6646d829 100644 --- a/homeassistant/components/bluesound/config_flow.py +++ b/homeassistant/components/bluesound/config_flow.py @@ -75,6 +75,9 @@ class BluesoundConfigFlow(ConfigFlow, domain=DOMAIN): self, discovery_info: ZeroconfServiceInfo ) -> ConfigFlowResult: """Handle a flow initialized by zeroconf discovery.""" + # the player can have an ipv6 address, but the api is only available on ipv4 + if discovery_info.ip_address.version != 4: + return self.async_abort(reason="no_ipv4_address") if discovery_info.port is not None: self._port = discovery_info.port diff --git a/homeassistant/components/bluesound/strings.json b/homeassistant/components/bluesound/strings.json index b50c01a11bf..1170e0b92e0 100644 --- a/homeassistant/components/bluesound/strings.json +++ b/homeassistant/components/bluesound/strings.json @@ -19,7 +19,8 @@ "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]" + "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", + "no_ipv4_address": "No IPv4 address found." 
}, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" diff --git a/homeassistant/components/bluetooth/manifest.json b/homeassistant/components/bluetooth/manifest.json index ec617b82a04..e4257221374 100644 --- a/homeassistant/components/bluetooth/manifest.json +++ b/homeassistant/components/bluetooth/manifest.json @@ -18,9 +18,9 @@ "bleak==0.22.3", "bleak-retry-connector==3.9.0", "bluetooth-adapters==0.21.4", - "bluetooth-auto-recovery==1.4.4", - "bluetooth-data-tools==1.25.0", - "dbus-fast==2.39.3", - "habluetooth==3.25.0" + "bluetooth-auto-recovery==1.4.5", + "bluetooth-data-tools==1.26.1", + "dbus-fast==2.43.0", + "habluetooth==3.37.0" ] } diff --git a/homeassistant/components/bosch_alarm/__init__.py b/homeassistant/components/bosch_alarm/__init__.py new file mode 100644 index 00000000000..bc7fee46f60 --- /dev/null +++ b/homeassistant/components/bosch_alarm/__init__.py @@ -0,0 +1,62 @@ +"""The Bosch Alarm integration.""" + +from __future__ import annotations + +from ssl import SSLError + +from bosch_alarm_mode2 import Panel + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import device_registry as dr + +from .const import CONF_INSTALLER_CODE, CONF_USER_CODE, DOMAIN + +PLATFORMS: list[Platform] = [Platform.ALARM_CONTROL_PANEL] + +type BoschAlarmConfigEntry = ConfigEntry[Panel] + + +async def async_setup_entry(hass: HomeAssistant, entry: BoschAlarmConfigEntry) -> bool: + """Set up Bosch Alarm from a config entry.""" + + panel = Panel( + host=entry.data[CONF_HOST], + port=entry.data[CONF_PORT], + automation_code=entry.data.get(CONF_PASSWORD), + installer_or_user_code=entry.data.get( + CONF_INSTALLER_CODE, entry.data.get(CONF_USER_CODE) + ), + ) + try: + await panel.connect() + except (PermissionError, ValueError) as err: + await panel.disconnect() + raise ConfigEntryNotReady from err + except (TimeoutError, OSError, ConnectionRefusedError, SSLError) as err: + await panel.disconnect() + raise ConfigEntryNotReady("Connection failed") from err + + entry.runtime_data = panel + + device_registry = dr.async_get(hass) + + device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + identifiers={(DOMAIN, entry.unique_id or entry.entry_id)}, + name=f"Bosch {panel.model}", + manufacturer="Bosch Security Systems", + model=panel.model, + sw_version=panel.firmware_version, + ) + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: BoschAlarmConfigEntry) -> bool: + """Unload a config entry.""" + if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): + await entry.runtime_data.disconnect() + return unload_ok diff --git a/homeassistant/components/bosch_alarm/alarm_control_panel.py b/homeassistant/components/bosch_alarm/alarm_control_panel.py new file mode 100644 index 00000000000..a1d8a7b90f4 --- /dev/null +++ b/homeassistant/components/bosch_alarm/alarm_control_panel.py @@ -0,0 +1,109 @@ +"""Support for Bosch Alarm Panel.""" + +from __future__ import annotations + +from bosch_alarm_mode2 import Panel + +from homeassistant.components.alarm_control_panel import ( + AlarmControlPanelEntity, + AlarmControlPanelEntityFeature, + AlarmControlPanelState, +) +from homeassistant.core import HomeAssistant +from 
homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . import BoschAlarmConfigEntry +from .const import DOMAIN + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: BoschAlarmConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up control panels for each area.""" + panel = config_entry.runtime_data + + async_add_entities( + AreaAlarmControlPanel( + panel, + area_id, + config_entry.unique_id or config_entry.entry_id, + ) + for area_id in panel.areas + ) + + +class AreaAlarmControlPanel(AlarmControlPanelEntity): + """An alarm control panel entity for a bosch alarm panel.""" + + _attr_has_entity_name = True + _attr_supported_features = ( + AlarmControlPanelEntityFeature.ARM_HOME + | AlarmControlPanelEntityFeature.ARM_AWAY + ) + _attr_code_arm_required = False + _attr_name = None + + def __init__(self, panel: Panel, area_id: int, unique_id: str) -> None: + """Initialise a Bosch Alarm control panel entity.""" + self.panel = panel + self._area = panel.areas[area_id] + self._area_id = area_id + self._attr_unique_id = f"{unique_id}_area_{area_id}" + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, self._attr_unique_id)}, + name=self._area.name, + manufacturer="Bosch Security Systems", + via_device=( + DOMAIN, + unique_id, + ), + ) + + @property + def alarm_state(self) -> AlarmControlPanelState | None: + """Return the state of the alarm.""" + if self._area.is_triggered(): + return AlarmControlPanelState.TRIGGERED + if self._area.is_disarmed(): + return AlarmControlPanelState.DISARMED + if self._area.is_arming(): + return AlarmControlPanelState.ARMING + if self._area.is_pending(): + return AlarmControlPanelState.PENDING + if self._area.is_part_armed(): + return AlarmControlPanelState.ARMED_HOME + if self._area.is_all_armed(): + return AlarmControlPanelState.ARMED_AWAY + return None + + async def async_alarm_disarm(self, code: str | None = None) -> None: + """Disarm this panel.""" + await self.panel.area_disarm(self._area_id) + + async def async_alarm_arm_home(self, code: str | None = None) -> None: + """Send arm home command.""" + await self.panel.area_arm_part(self._area_id) + + async def async_alarm_arm_away(self, code: str | None = None) -> None: + """Send arm away command.""" + await self.panel.area_arm_all(self._area_id) + + @property + def available(self) -> bool: + """Return True if entity is available.""" + return self.panel.connection_status() + + async def async_added_to_hass(self) -> None: + """Run when entity attached to hass.""" + await super().async_added_to_hass() + self._area.status_observer.attach(self.schedule_update_ha_state) + self.panel.connection_status_observer.attach(self.schedule_update_ha_state) + + async def async_will_remove_from_hass(self) -> None: + """Run when entity removed from hass.""" + await super().async_will_remove_from_hass() + self._area.status_observer.detach(self.schedule_update_ha_state) + self.panel.connection_status_observer.detach(self.schedule_update_ha_state) diff --git a/homeassistant/components/bosch_alarm/config_flow.py b/homeassistant/components/bosch_alarm/config_flow.py new file mode 100644 index 00000000000..e48f2a11944 --- /dev/null +++ b/homeassistant/components/bosch_alarm/config_flow.py @@ -0,0 +1,165 @@ +"""Config flow for Bosch Alarm integration.""" + +from __future__ import annotations + +import asyncio +import logging +import ssl +from typing import Any + +from bosch_alarm_mode2 
import Panel +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import ( + CONF_CODE, + CONF_HOST, + CONF_MODEL, + CONF_PASSWORD, + CONF_PORT, +) +import homeassistant.helpers.config_validation as cv + +from .const import CONF_INSTALLER_CODE, CONF_USER_CODE, DOMAIN + +_LOGGER = logging.getLogger(__name__) + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_HOST): str, + vol.Required(CONF_PORT, default=7700): cv.positive_int, + } +) + +STEP_AUTH_DATA_SCHEMA_SOLUTION = vol.Schema( + { + vol.Required(CONF_USER_CODE): str, + } +) + +STEP_AUTH_DATA_SCHEMA_AMAX = vol.Schema( + { + vol.Required(CONF_INSTALLER_CODE): str, + vol.Required(CONF_PASSWORD): str, + } +) + +STEP_AUTH_DATA_SCHEMA_BG = vol.Schema( + { + vol.Required(CONF_PASSWORD): str, + } +) + +STEP_INIT_DATA_SCHEMA = vol.Schema({vol.Optional(CONF_CODE): str}) + + +async def try_connect( + data: dict[str, Any], load_selector: int = 0 +) -> tuple[str, int | None]: + """Validate the user input allows us to connect. + + Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user. + """ + panel = Panel( + host=data[CONF_HOST], + port=data[CONF_PORT], + automation_code=data.get(CONF_PASSWORD), + installer_or_user_code=data.get(CONF_INSTALLER_CODE, data.get(CONF_USER_CODE)), + ) + + try: + await panel.connect(load_selector) + finally: + await panel.disconnect() + + return (panel.model, panel.serial_number) + + +class BoschAlarmConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Bosch Alarm.""" + + def __init__(self) -> None: + """Init config flow.""" + + self._data: dict[str, Any] = {} + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + errors: dict[str, str] = {} + + if user_input is not None: + try: + # Use load_selector = 0 to fetch the panel model without authentication. 
+ (model, serial) = await try_connect(user_input, 0) + except ( + OSError, + ConnectionRefusedError, + ssl.SSLError, + asyncio.exceptions.TimeoutError, + ) as e: + _LOGGER.error("Connection Error: %s", e) + errors["base"] = "cannot_connect" + except Exception: + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + self._data = user_input + self._data[CONF_MODEL] = model + return await self.async_step_auth() + return self.async_show_form( + step_id="user", + data_schema=self.add_suggested_values_to_schema( + STEP_USER_DATA_SCHEMA, user_input + ), + errors=errors, + ) + + async def async_step_auth( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the auth step.""" + errors: dict[str, str] = {} + + # Each model variant requires a different authentication flow + if "Solution" in self._data[CONF_MODEL]: + schema = STEP_AUTH_DATA_SCHEMA_SOLUTION + elif "AMAX" in self._data[CONF_MODEL]: + schema = STEP_AUTH_DATA_SCHEMA_AMAX + else: + schema = STEP_AUTH_DATA_SCHEMA_BG + + if user_input is not None: + self._data.update(user_input) + try: + (model, serial_number) = await try_connect( + self._data, Panel.LOAD_EXTENDED_INFO + ) + except (PermissionError, ValueError) as e: + errors["base"] = "invalid_auth" + _LOGGER.error("Authentication Error: %s", e) + except ( + OSError, + ConnectionRefusedError, + ssl.SSLError, + TimeoutError, + ) as e: + _LOGGER.error("Connection Error: %s", e) + errors["base"] = "cannot_connect" + except Exception: + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + if serial_number: + await self.async_set_unique_id(str(serial_number)) + self._abort_if_unique_id_configured() + else: + self._async_abort_entries_match({CONF_HOST: self._data[CONF_HOST]}) + return self.async_create_entry(title=f"Bosch {model}", data=self._data) + + return self.async_show_form( + step_id="auth", + data_schema=self.add_suggested_values_to_schema(schema, user_input), + errors=errors, + ) diff --git a/homeassistant/components/bosch_alarm/const.py b/homeassistant/components/bosch_alarm/const.py new file mode 100644 index 00000000000..7205831391c --- /dev/null +++ b/homeassistant/components/bosch_alarm/const.py @@ -0,0 +1,6 @@ +"""Constants for the Bosch Alarm integration.""" + +DOMAIN = "bosch_alarm" +HISTORY_ATTR = "history" +CONF_INSTALLER_CODE = "installer_code" +CONF_USER_CODE = "user_code" diff --git a/homeassistant/components/bosch_alarm/manifest.json b/homeassistant/components/bosch_alarm/manifest.json new file mode 100644 index 00000000000..a54ace71782 --- /dev/null +++ b/homeassistant/components/bosch_alarm/manifest.json @@ -0,0 +1,11 @@ +{ + "domain": "bosch_alarm", + "name": "Bosch Alarm", + "codeowners": ["@mag1024", "@sanjay900"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/bosch_alarm", + "integration_type": "device", + "iot_class": "local_push", + "quality_scale": "bronze", + "requirements": ["bosch-alarm-mode2==0.4.3"] +} diff --git a/homeassistant/components/bosch_alarm/quality_scale.yaml b/homeassistant/components/bosch_alarm/quality_scale.yaml new file mode 100644 index 00000000000..467760fb863 --- /dev/null +++ b/homeassistant/components/bosch_alarm/quality_scale.yaml @@ -0,0 +1,84 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + No custom actions defined + appropriate-polling: + status: exempt + comment: | + No polling + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + 
dependency-transparency: done + docs-actions: + status: exempt + comment: | + No custom actions are defined. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: done + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: exempt + comment: | + No custom actions are defined. + config-entry-unloading: done + docs-configuration-parameters: todo + docs-installation-parameters: todo + entity-unavailable: todo + integration-owner: done + log-when-unavailable: todo + parallel-updates: todo + reauthentication-flow: todo + test-coverage: done + + # Gold + devices: done + diagnostics: todo + discovery-update-info: todo + discovery: todo + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: exempt + comment: | + Device type integration + entity-category: todo + entity-device-class: todo + entity-disabled-by-default: todo + entity-translations: todo + exception-translations: todo + icon-translations: todo + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: | + No repairs + stale-devices: + status: exempt + comment: | + Device type integration + + # Platinum + async-dependency: done + inject-websession: + status: exempt + comment: | + Integration does not make any HTTP requests. + strict-typing: done diff --git a/homeassistant/components/bosch_alarm/strings.json b/homeassistant/components/bosch_alarm/strings.json new file mode 100644 index 00000000000..f4846021b55 --- /dev/null +++ b/homeassistant/components/bosch_alarm/strings.json @@ -0,0 +1,36 @@ +{ + "config": { + "step": { + "user": { + "data": { + "host": "[%key:common::config_flow::data::host%]", + "port": "[%key:common::config_flow::data::port%]" + }, + "data_description": { + "host": "The hostname or IP address of your Bosch alarm panel", + "port": "The port used to connect to your Bosch alarm panel. 
This is usually 7700" + } + }, + "auth": { + "data": { + "password": "[%key:common::config_flow::data::password%]", + "installer_code": "Installer code", + "user_code": "User code" + }, + "data_description": { + "password": "The Mode 2 automation code from your panel", + "installer_code": "The installer code from your panel", + "user_code": "The user code from your panel" + } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + } + } +} diff --git a/homeassistant/components/bring/diagnostics.py b/homeassistant/components/bring/diagnostics.py index 6c2f779ef05..e5cafd30ab5 100644 --- a/homeassistant/components/bring/diagnostics.py +++ b/homeassistant/components/bring/diagnostics.py @@ -4,10 +4,14 @@ from __future__ import annotations from typing import Any +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.const import CONF_EMAIL, CONF_NAME from homeassistant.core import HomeAssistant from .coordinator import BringConfigEntry +TO_REDACT = {CONF_NAME, CONF_EMAIL} + async def async_get_config_entry_diagnostics( hass: HomeAssistant, config_entry: BringConfigEntry @@ -15,7 +19,10 @@ async def async_get_config_entry_diagnostics( """Return diagnostics for a config entry.""" return { - "data": {k: v.to_dict() for k, v in config_entry.runtime_data.data.items()}, + "data": { + k: async_redact_data(v.to_dict(), TO_REDACT) + for k, v in config_entry.runtime_data.data.items() + }, "lists": [lst.to_dict() for lst in config_entry.runtime_data.lists], "user_settings": config_entry.runtime_data.user_settings.to_dict(), } diff --git a/homeassistant/components/bring/event.py b/homeassistant/components/bring/event.py index 08d06b596b8..403856405ce 100644 --- a/homeassistant/components/bring/event.py +++ b/homeassistant/components/bring/event.py @@ -77,9 +77,12 @@ class BringEventEntity(BringBaseEntity, EventEntity): attributes = asdict(activity.content) attributes["last_activity_by"] = next( - x.name - for x in bring_list.users.users - if x.publicUuid == activity.content.publicUserUuid + ( + x.name + for x in bring_list.users.users + if x.publicUuid == activity.content.publicUserUuid + ), + None, ) self._trigger_event( diff --git a/homeassistant/components/bring/manifest.json b/homeassistant/components/bring/manifest.json index f292b10f7dc..b2d42835cce 100644 --- a/homeassistant/components/bring/manifest.json +++ b/homeassistant/components/bring/manifest.json @@ -8,5 +8,5 @@ "iot_class": "cloud_polling", "loggers": ["bring_api"], "quality_scale": "platinum", - "requirements": ["bring-api==1.0.2"] + "requirements": ["bring-api==1.1.0"] } diff --git a/homeassistant/components/brother/__init__.py b/homeassistant/components/brother/__init__.py index 464e6629224..1c1768b58fd 100644 --- a/homeassistant/components/brother/__init__.py +++ b/homeassistant/components/brother/__init__.py @@ -9,6 +9,7 @@ from homeassistant.const import CONF_HOST, CONF_TYPE, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady +from .const import DOMAIN from .coordinator import BrotherConfigEntry, BrotherDataUpdateCoordinator PLATFORMS = [Platform.SENSOR] @@ -25,7 +26,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: BrotherConfigEntry) -> b host, 
printer_type=printer_type, snmp_engine=snmp_engine ) except (ConnectionError, SnmpError, TimeoutError) as error: - raise ConfigEntryNotReady from error + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="cannot_connect", + translation_placeholders={ + "device": entry.title, + "error": repr(error), + }, + ) from error coordinator = BrotherDataUpdateCoordinator(hass, entry, brother) await coordinator.async_config_entry_first_refresh() diff --git a/homeassistant/components/brother/coordinator.py b/homeassistant/components/brother/coordinator.py index 4f518ba8a25..a3c337f27f7 100644 --- a/homeassistant/components/brother/coordinator.py +++ b/homeassistant/components/brother/coordinator.py @@ -26,6 +26,7 @@ class BrotherDataUpdateCoordinator(DataUpdateCoordinator[BrotherSensors]): ) -> None: """Initialize.""" self.brother = brother + self.device_name = config_entry.title super().__init__( hass, @@ -41,5 +42,12 @@ class BrotherDataUpdateCoordinator(DataUpdateCoordinator[BrotherSensors]): async with timeout(20): data = await self.brother.async_update() except (ConnectionError, SnmpError, UnsupportedModelError) as error: - raise UpdateFailed(error) from error + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error", + translation_placeholders={ + "device": self.device_name, + "error": repr(error), + }, + ) from error return data diff --git a/homeassistant/components/brother/strings.json b/homeassistant/components/brother/strings.json index b502ed7e3b9..d0714a199c4 100644 --- a/homeassistant/components/brother/strings.json +++ b/homeassistant/components/brother/strings.json @@ -159,5 +159,13 @@ "name": "Last restart" } } + }, + "exceptions": { + "cannot_connect": { + "message": "An error occurred while connecting to the {device} printer: {error}" + }, + "update_error": { + "message": "An error occurred while retrieving data from the {device} printer: {error}" + } } } diff --git a/homeassistant/components/buienradar/sensor.py b/homeassistant/components/buienradar/sensor.py index a4d39ea07cc..586543de129 100644 --- a/homeassistant/components/buienradar/sensor.py +++ b/homeassistant/components/buienradar/sensor.py @@ -170,6 +170,7 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] 
= ( native_unit_of_measurement=DEGREE, icon="mdi:compass-outline", device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, ), SensorEntityDescription( key="pressure", diff --git a/homeassistant/components/canary/strings.json b/homeassistant/components/canary/strings.json index 699e8b25e11..8be11a48b5e 100644 --- a/homeassistant/components/canary/strings.json +++ b/homeassistant/components/canary/strings.json @@ -21,8 +21,8 @@ "step": { "init": { "data": { - "ffmpeg_arguments": "Arguments passed to ffmpeg for cameras", - "timeout": "Request Timeout (seconds)" + "ffmpeg_arguments": "Arguments passed to FFmpeg for cameras", + "timeout": "Request timeout (seconds)" } } } diff --git a/homeassistant/components/cast/config_flow.py b/homeassistant/components/cast/config_flow.py index 034cf856023..6c33eac230f 100644 --- a/homeassistant/components/cast/config_flow.py +++ b/homeassistant/components/cast/config_flow.py @@ -16,12 +16,21 @@ from homeassistant.config_entries import ( from homeassistant.const import CONF_UUID from homeassistant.core import callback from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo from .const import CONF_IGNORE_CEC, CONF_KNOWN_HOSTS, DOMAIN IGNORE_CEC_SCHEMA = vol.Schema(vol.All(cv.ensure_list, [cv.string])) -KNOWN_HOSTS_SCHEMA = vol.Schema(vol.All(cv.ensure_list, [cv.string])) +KNOWN_HOSTS_SCHEMA = vol.Schema( + { + vol.Optional( + CONF_KNOWN_HOSTS, + ): SelectSelector( + SelectSelectorConfig(custom_value=True, options=[], multiple=True), + ) + } +) WANTED_UUID_SCHEMA = vol.Schema(vol.All(cv.ensure_list, [cv.string])) @@ -30,12 +39,6 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Initialize flow.""" - self._ignore_cec = set[str]() - self._known_hosts = set[str]() - self._wanted_uuid = set[str]() - @staticmethod @callback def async_get_options_flow( @@ -62,48 +65,31 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Confirm the setup.""" - errors = {} - data = {CONF_KNOWN_HOSTS: self._known_hosts} - if user_input is not None: - bad_hosts = False - known_hosts = user_input[CONF_KNOWN_HOSTS] - known_hosts = [x.strip() for x in known_hosts.split(",") if x.strip()] - try: - known_hosts = KNOWN_HOSTS_SCHEMA(known_hosts) - except vol.Invalid: - errors["base"] = "invalid_known_hosts" - bad_hosts = True - else: - self._known_hosts = known_hosts - data = self._get_data() - if not bad_hosts: - return self.async_create_entry(title="Google Cast", data=data) + known_hosts = _trim_items(user_input.get(CONF_KNOWN_HOSTS, [])) + return self.async_create_entry( + title="Google Cast", + data=self._get_data(known_hosts=known_hosts), + ) - fields = {} - fields[vol.Optional(CONF_KNOWN_HOSTS, default="")] = str - - return self.async_show_form( - step_id="config", data_schema=vol.Schema(fields), errors=errors - ) + return self.async_show_form(step_id="config", data_schema=KNOWN_HOSTS_SCHEMA) async def async_step_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Confirm the setup.""" - - data = self._get_data() - if user_input is not None or not onboarding.async_is_onboarded(self.hass): - return self.async_create_entry(title="Google Cast", data=data) + return self.async_create_entry(title="Google Cast", data=self._get_data()) return 
self.async_show_form(step_id="confirm") - def _get_data(self): + def _get_data( + self, *, known_hosts: list[str] | None = None + ) -> dict[str, list[str]]: return { - CONF_IGNORE_CEC: list(self._ignore_cec), - CONF_KNOWN_HOSTS: list(self._known_hosts), - CONF_UUID: list(self._wanted_uuid), + CONF_IGNORE_CEC: [], + CONF_KNOWN_HOSTS: known_hosts or [], + CONF_UUID: [], } @@ -123,31 +109,24 @@ class CastOptionsFlowHandler(OptionsFlow): ) -> ConfigFlowResult: """Manage the Google Cast options.""" errors: dict[str, str] = {} - current_config = self.config_entry.data if user_input is not None: - bad_hosts, known_hosts = _string_to_list( - user_input.get(CONF_KNOWN_HOSTS, ""), KNOWN_HOSTS_SCHEMA + known_hosts = _trim_items(user_input.get(CONF_KNOWN_HOSTS, [])) + self.updated_config = dict(self.config_entry.data) + self.updated_config[CONF_KNOWN_HOSTS] = known_hosts + + if self.show_advanced_options: + return await self.async_step_advanced_options() + + self.hass.config_entries.async_update_entry( + self.config_entry, data=self.updated_config ) - - if not bad_hosts: - self.updated_config = dict(current_config) - self.updated_config[CONF_KNOWN_HOSTS] = known_hosts - - if self.show_advanced_options: - return await self.async_step_advanced_options() - - self.hass.config_entries.async_update_entry( - self.config_entry, data=self.updated_config - ) - return self.async_create_entry(title="", data={}) - - fields: dict[vol.Marker, type[str]] = {} - suggested_value = _list_to_string(current_config.get(CONF_KNOWN_HOSTS)) - _add_with_suggestion(fields, CONF_KNOWN_HOSTS, suggested_value) + return self.async_create_entry(title="", data={}) return self.async_show_form( step_id="basic_options", - data_schema=vol.Schema(fields), + data_schema=self.add_suggested_values_to_schema( + KNOWN_HOSTS_SCHEMA, self.config_entry.data + ), errors=errors, last_step=not self.show_advanced_options, ) @@ -206,6 +185,10 @@ def _string_to_list(string, schema): return invalid, items +def _trim_items(items: list[str]) -> list[str]: + return [x.strip() for x in items if x.strip()] + + def _add_with_suggestion( fields: dict[vol.Marker, type[str]], key: str, suggested_value: str ) -> None: diff --git a/homeassistant/components/cast/const.py b/homeassistant/components/cast/const.py index 056ee054d1d..0a85a0007b3 100644 --- a/homeassistant/components/cast/const.py +++ b/homeassistant/components/cast/const.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, TypedDict +from typing import TYPE_CHECKING, NotRequired, TypedDict from homeassistant.util.signal_type import SignalType @@ -46,3 +46,4 @@ class HomeAssistantControllerData(TypedDict): hass_uuid: str client_id: str | None refresh_token: str + app_id: NotRequired[str] diff --git a/homeassistant/components/cast/helpers.py b/homeassistant/components/cast/helpers.py index 8f4af197b8e..c45bbb4fbbc 100644 --- a/homeassistant/components/cast/helpers.py +++ b/homeassistant/components/cast/helpers.py @@ -7,6 +7,7 @@ from dataclasses import dataclass import logging from typing import TYPE_CHECKING, ClassVar from urllib.parse import urlparse +from uuid import UUID import aiohttp import attr @@ -40,7 +41,7 @@ class ChromecastInfo: is_dynamic_group = attr.ib(type=bool | None, default=None) @property - def friendly_name(self) -> str: + def friendly_name(self) -> str | None: """Return the Friendly Name.""" return self.cast_info.friendly_name @@ -50,7 +51,7 @@ class ChromecastInfo: return self.cast_info.cast_type == CAST_TYPE_GROUP @property - def uuid(self) -> 
bool: + def uuid(self) -> UUID: """Return the UUID.""" return self.cast_info.uuid @@ -80,7 +81,7 @@ class ChromecastInfo: "+label%3A%22integration%3A+cast%22" ) - _LOGGER.debug( + _LOGGER.info( ( "Fetched cast details for unknown model '%s' manufacturer:" " '%s', type: '%s'. Please %s" @@ -111,7 +112,10 @@ class ChromecastInfo: is_dynamic_group = False http_group_status = None http_group_status = dial.get_multizone_status( - None, + # We pass services which will be used for the HTTP request, and we + # don't care about the host in http_group_status.dynamic_groups so + # we pass an empty string to simplify the code. + "", services=self.cast_info.services, zconf=ChromeCastZeroconf.get_zeroconf(), ) diff --git a/homeassistant/components/cast/manifest.json b/homeassistant/components/cast/manifest.json index 0650f267544..6c8b0536e2f 100644 --- a/homeassistant/components/cast/manifest.json +++ b/homeassistant/components/cast/manifest.json @@ -14,7 +14,7 @@ "documentation": "https://www.home-assistant.io/integrations/cast", "iot_class": "local_polling", "loggers": ["casttube", "pychromecast"], - "requirements": ["PyChromecast==14.0.5"], + "requirements": ["PyChromecast==14.0.7"], "single_config_entry": true, "zeroconf": ["_googlecast._tcp.local."] } diff --git a/homeassistant/components/cast/services.yaml b/homeassistant/components/cast/services.yaml index e2e23ad40a2..45b36f6d983 100644 --- a/homeassistant/components/cast/services.yaml +++ b/homeassistant/components/cast/services.yaml @@ -7,11 +7,11 @@ show_lovelace_view: integration: cast domain: media_player dashboard_path: - required: true example: lovelace-cast selector: text: view_path: + required: true example: downstairs selector: text: diff --git a/homeassistant/components/cast/strings.json b/homeassistant/components/cast/strings.json index 9c49813bd83..8c7c7c0cff0 100644 --- a/homeassistant/components/cast/strings.json +++ b/homeassistant/components/cast/strings.json @@ -6,9 +6,11 @@ }, "config": { "title": "Google Cast configuration", - "description": "Known Hosts - A comma-separated list of hostnames or IP-addresses of cast devices, use if mDNS discovery is not working.", "data": { - "known_hosts": "Known hosts" + "known_hosts": "Add known host" + }, + "data_description": { + "known_hosts": "Hostnames or IP-addresses of cast devices, use if mDNS discovery is not working" } } }, @@ -20,9 +22,11 @@ "step": { "basic_options": { "title": "[%key:component::cast::config::step::config::title%]", - "description": "[%key:component::cast::config::step::config::description%]", "data": { "known_hosts": "[%key:component::cast::config::step::config::data::known_hosts%]" + }, + "data_description": { + "known_hosts": "[%key:component::cast::config::step::config::data_description::known_hosts%]" } }, "advanced_options": { @@ -49,7 +53,7 @@ }, "dashboard_path": { "name": "Dashboard path", - "description": "The URL path of the dashboard to show." + "description": "The URL path of the dashboard to show, defaults to lovelace if not specified." 
}, "view_path": { "name": "View path", diff --git a/homeassistant/components/chacon_dio/config_flow.py b/homeassistant/components/chacon_dio/config_flow.py index 54604b81153..daaf38e0edc 100644 --- a/homeassistant/components/chacon_dio/config_flow.py +++ b/homeassistant/components/chacon_dio/config_flow.py @@ -44,7 +44,7 @@ class ChaconDioConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" except DIOChaconInvalidAuthError: errors["base"] = "invalid_auth" - except Exception: # pylint: disable=broad-except + except Exception: _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" diff --git a/homeassistant/components/cloud/http_api.py b/homeassistant/components/cloud/http_api.py index 73952d80f6c..6f18cc424cd 100644 --- a/homeassistant/components/cloud/http_api.py +++ b/homeassistant/components/cloud/http_api.py @@ -245,6 +245,10 @@ class CloudLoginView(HomeAssistantView): name = "api:cloud:login" @require_admin + async def post(self, request: web.Request) -> web.Response: + """Handle login request.""" + return await self._post(request) + @_handle_cloud_errors @RequestDataValidator( vol.Schema( @@ -259,7 +263,7 @@ class CloudLoginView(HomeAssistantView): ) ) ) - async def post(self, request: web.Request, data: dict[str, Any]) -> web.Response: + async def _post(self, request: web.Request, data: dict[str, Any]) -> web.Response: """Handle login request.""" hass = request.app[KEY_HASS] cloud = hass.data[DATA_CLOUD] @@ -316,8 +320,12 @@ class CloudLogoutView(HomeAssistantView): name = "api:cloud:logout" @require_admin - @_handle_cloud_errors async def post(self, request: web.Request) -> web.Response: + """Handle logout request.""" + return await self._post(request) + + @_handle_cloud_errors + async def _post(self, request: web.Request) -> web.Response: """Handle logout request.""" hass = request.app[KEY_HASS] cloud = hass.data[DATA_CLOUD] @@ -400,9 +408,13 @@ class CloudForgotPasswordView(HomeAssistantView): name = "api:cloud:forgot_password" @require_admin + async def post(self, request: web.Request) -> web.Response: + """Handle forgot password request.""" + return await self._post(request) + @_handle_cloud_errors @RequestDataValidator(vol.Schema({vol.Required("email"): str})) - async def post(self, request: web.Request, data: dict[str, Any]) -> web.Response: + async def _post(self, request: web.Request, data: dict[str, Any]) -> web.Response: """Handle forgot password request.""" hass = request.app[KEY_HASS] cloud = hass.data[DATA_CLOUD] diff --git a/homeassistant/components/cloud/tts.py b/homeassistant/components/cloud/tts.py index 3ac3f3d1c2d..f901adfa99e 100644 --- a/homeassistant/components/cloud/tts.py +++ b/homeassistant/components/cloud/tts.py @@ -286,7 +286,7 @@ class CloudTTSEntity(TextToSpeechEntity): return self._language @property - def default_options(self) -> dict[str, Any]: + def default_options(self) -> dict[str, str]: """Return a dict include default options.""" return { ATTR_AUDIO_OUTPUT: AudioOutput.MP3, @@ -363,7 +363,7 @@ class CloudTTSEntity(TextToSpeechEntity): _LOGGER.error("Voice error: %s", err) return (None, None) - return (str(options[ATTR_AUDIO_OUTPUT].value), data) + return (options[ATTR_AUDIO_OUTPUT], data) class CloudProvider(Provider): @@ -404,7 +404,7 @@ class CloudProvider(Provider): return [Voice(voice, voice) for voice in voices] @property - def default_options(self) -> dict[str, Any]: + def default_options(self) -> dict[str, str]: """Return a dict include default options.""" return { ATTR_AUDIO_OUTPUT: AudioOutput.MP3, @@ 
-444,7 +444,7 @@ class CloudProvider(Provider): _LOGGER.error("Voice error: %s", err) return (None, None) - return (str(options[ATTR_AUDIO_OUTPUT].value), data) + return options[ATTR_AUDIO_OUTPUT], data @callback diff --git a/homeassistant/components/cloudflare/strings.json b/homeassistant/components/cloudflare/strings.json index 8c8ec57b074..453135f47a0 100644 --- a/homeassistant/components/cloudflare/strings.json +++ b/homeassistant/components/cloudflare/strings.json @@ -4,19 +4,19 @@ "step": { "user": { "title": "Connect to Cloudflare", - "description": "This integration requires an API Token created with Zone:Zone:Read and Zone:DNS:Edit permissions for all zones in your account.", + "description": "This integration requires an API token created with Zone:Zone:Read and Zone:DNS:Edit permissions for all zones in your account.", "data": { "api_token": "[%key:common::config_flow::data::api_token%]" } }, "zone": { - "title": "Choose the Zone to Update", + "title": "Choose the zone to update", "data": { "zone": "Zone" } }, "records": { - "title": "Choose the Records to Update", + "title": "Choose the records to update", "data": { "records": "Records" } @@ -40,7 +40,7 @@ "services": { "update_records": { "name": "Update records", - "description": "Manually trigger update to Cloudflare records." + "description": "Manually triggers an update of Cloudflare records." } } } diff --git a/homeassistant/components/comelit/alarm_control_panel.py b/homeassistant/components/comelit/alarm_control_panel.py index 0a01dd957a6..5ecc9a63599 100644 --- a/homeassistant/components/comelit/alarm_control_panel.py +++ b/homeassistant/components/comelit/alarm_control_panel.py @@ -20,6 +20,9 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from .coordinator import ComelitConfigEntry, ComelitVedoSystem +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + _LOGGER = logging.getLogger(__name__) AWAY = "away" diff --git a/homeassistant/components/comelit/binary_sensor.py b/homeassistant/components/comelit/binary_sensor.py index c17057d19d1..dfa6d3e97f3 100644 --- a/homeassistant/components/comelit/binary_sensor.py +++ b/homeassistant/components/comelit/binary_sensor.py @@ -16,6 +16,9 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from .coordinator import ComelitConfigEntry, ComelitVedoSystem +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/comelit/climate.py b/homeassistant/components/comelit/climate.py index 3433d1bdf04..3ec79001d55 100644 --- a/homeassistant/components/comelit/climate.py +++ b/homeassistant/components/comelit/climate.py @@ -21,8 +21,12 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity +from .const import DOMAIN from .coordinator import ComelitConfigEntry, ComelitSerialBridge +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + class ClimaComelitMode(StrEnum): """Serial Bridge clima modes.""" @@ -115,13 +119,15 @@ class ComelitClimateEntity(CoordinatorEntity[ComelitSerialBridge], ClimateEntity # because no serial number or mac is available self._attr_unique_id = f"{config_entry_entry_id}-{device.index}" self._attr_device_info = coordinator.platform_device_info(device, device.type) + self._update_attributes() - @callback - def 
_handle_coordinator_update(self) -> None: - """Handle updated data from the coordinator.""" + def _update_attributes(self) -> None: + """Update class attributes.""" device = self.coordinator.data[CLIMATE][self._device.index] if not isinstance(device.val, list): - raise HomeAssistantError("Invalid clima data") + raise HomeAssistantError( + translation_domain=DOMAIN, translation_key="invalid_clima_data" + ) # CLIMATE has a 2 item tuple: # - first for Clima @@ -152,6 +158,12 @@ class ComelitClimateEntity(CoordinatorEntity[ComelitSerialBridge], ClimateEntity self._attr_target_temperature = values[4] / 10 + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + self._update_attributes() + super()._handle_coordinator_update() + async def async_set_temperature(self, **kwargs: Any) -> None: """Set new target temperature.""" if ( @@ -165,6 +177,8 @@ class ComelitClimateEntity(CoordinatorEntity[ComelitSerialBridge], ClimateEntity await self.coordinator.api.set_clima_status( self._device.index, ClimaComelitCommand.SET, target_temp ) + self._attr_target_temperature = target_temp + self.async_write_ha_state() async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: """Set hvac mode.""" @@ -176,3 +190,5 @@ class ComelitClimateEntity(CoordinatorEntity[ComelitSerialBridge], ClimateEntity await self.coordinator.api.set_clima_status( self._device.index, MODE_TO_ACTION[hvac_mode] ) + self._attr_hvac_mode = hvac_mode + self.async_write_ha_state() diff --git a/homeassistant/components/comelit/const.py b/homeassistant/components/comelit/const.py index 84d8fbd6315..f52f33fd6da 100644 --- a/homeassistant/components/comelit/const.py +++ b/homeassistant/components/comelit/const.py @@ -9,3 +9,5 @@ _LOGGER = logging.getLogger(__package__) DOMAIN = "comelit" DEFAULT_PORT = 80 DEVICE_TYPE_LIST = [BRIDGE, VEDO] + +SCAN_INTERVAL = 5 diff --git a/homeassistant/components/comelit/coordinator.py b/homeassistant/components/comelit/coordinator.py index b3be3a47825..df4965d9945 100644 --- a/homeassistant/components/comelit/coordinator.py +++ b/homeassistant/components/comelit/coordinator.py @@ -22,7 +22,7 @@ from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers import device_registry as dr from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import _LOGGER, DOMAIN +from .const import _LOGGER, DOMAIN, SCAN_INTERVAL type ComelitConfigEntry = ConfigEntry[ComelitBaseCoordinator] @@ -53,7 +53,7 @@ class ComelitBaseCoordinator(DataUpdateCoordinator[T]): logger=_LOGGER, config_entry=entry, name=f"{DOMAIN}-{host}-coordinator", - update_interval=timedelta(seconds=5), + update_interval=timedelta(seconds=SCAN_INTERVAL), ) device_registry = dr.async_get(self.hass) device_registry.async_get_or_create( diff --git a/homeassistant/components/comelit/cover.py b/homeassistant/components/comelit/cover.py index 64412569f95..9bcf52ac111 100644 --- a/homeassistant/components/comelit/cover.py +++ b/homeassistant/components/comelit/cover.py @@ -15,6 +15,9 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from .coordinator import ComelitConfigEntry, ComelitSerialBridge +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/comelit/humidifier.py b/homeassistant/components/comelit/humidifier.py index da6d44b1bbe..ad8f49ed5e2 100644 --- 
a/homeassistant/components/comelit/humidifier.py +++ b/homeassistant/components/comelit/humidifier.py @@ -24,6 +24,9 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN from .coordinator import ComelitConfigEntry, ComelitSerialBridge +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + class HumidifierComelitMode(StrEnum): """Serial Bridge humidifier modes.""" @@ -121,13 +124,15 @@ class ComelitHumidifierEntity(CoordinatorEntity[ComelitSerialBridge], Humidifier self._active_mode = active_mode self._active_action = active_action self._set_command = set_command + self._update_attributes() - @callback - def _handle_coordinator_update(self) -> None: - """Handle updated data from the coordinator.""" + def _update_attributes(self) -> None: + """Update class attributes.""" device = self.coordinator.data[CLIMATE][self._device.index] if not isinstance(device.val, list): - raise HomeAssistantError("Invalid clima data") + raise HomeAssistantError( + translation_domain=DOMAIN, translation_key="invalid_clima_data" + ) # CLIMATE has a 2 item tuple: # - first for Clima @@ -149,6 +154,12 @@ class ComelitHumidifierEntity(CoordinatorEntity[ComelitSerialBridge], Humidifier self._attr_mode = MODE_AUTO if _automatic else MODE_NORMAL self._attr_target_humidity = values[4] / 10 + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + self._update_attributes() + super()._handle_coordinator_update() + async def async_set_humidity(self, humidity: int) -> None: """Set new target humidity.""" if self.mode == HumidifierComelitMode.OFF: @@ -163,12 +174,16 @@ class ComelitHumidifierEntity(CoordinatorEntity[ComelitSerialBridge], Humidifier await self.coordinator.api.set_humidity_status( self._device.index, HumidifierComelitCommand.SET, humidity ) + self._attr_target_humidity = humidity + self.async_write_ha_state() async def async_set_mode(self, mode: str) -> None: """Set humidifier mode.""" await self.coordinator.api.set_humidity_status( self._device.index, MODE_TO_ACTION[mode] ) + self._attr_mode = mode + self.async_write_ha_state() async def async_turn_on(self, **kwargs: Any) -> None: """Turn on.""" diff --git a/homeassistant/components/comelit/light.py b/homeassistant/components/comelit/light.py index 45f4146ece6..09180d628a6 100644 --- a/homeassistant/components/comelit/light.py +++ b/homeassistant/components/comelit/light.py @@ -14,6 +14,9 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from .coordinator import ComelitConfigEntry, ComelitSerialBridge +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/comelit/manifest.json b/homeassistant/components/comelit/manifest.json index 8836af4e8dd..3abfc222e7d 100644 --- a/homeassistant/components/comelit/manifest.json +++ b/homeassistant/components/comelit/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["aiocomelit"], - "requirements": ["aiocomelit==0.11.2"] + "requirements": ["aiocomelit==0.11.3"] } diff --git a/homeassistant/components/comelit/sensor.py b/homeassistant/components/comelit/sensor.py index 3d57d9dca9c..c93ccd30eb6 100644 --- a/homeassistant/components/comelit/sensor.py +++ b/homeassistant/components/comelit/sensor.py @@ -20,6 +20,9 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from .coordinator import 
ComelitConfigEntry, ComelitSerialBridge, ComelitVedoSystem +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + SENSOR_BRIDGE_TYPES: Final = ( SensorEntityDescription( key="power", diff --git a/homeassistant/components/comelit/strings.json b/homeassistant/components/comelit/strings.json index 14d947c7323..496d62655a9 100644 --- a/homeassistant/components/comelit/strings.json +++ b/homeassistant/components/comelit/strings.json @@ -3,19 +3,25 @@ "flow_title": "{host}", "step": { "reauth_confirm": { - "description": "Please enter the correct PIN for {host}", "data": { "pin": "[%key:common::config_flow::data::pin%]" + }, + "data_description": { + "pin": "The PIN of your Comelit device." } }, "user": { "data": { "host": "[%key:common::config_flow::data::host%]", "port": "[%key:common::config_flow::data::port%]", - "pin": "[%key:common::config_flow::data::pin%]" + "pin": "[%key:common::config_flow::data::pin%]", + "type": "Device type" }, "data_description": { - "host": "The hostname or IP address of your Comelit device." + "host": "The hostname or IP address of your Comelit device.", + "port": "The port of your Comelit device.", + "pin": "[%key:component::comelit::config::step::reauth_confirm::data_description::pin%]", + "type": "The type of your Comelit device." } } }, @@ -58,6 +64,9 @@ "exceptions": { "humidity_while_off": { "message": "Cannot change humidity while off" + }, + "invalid_clima_data": { + "message": "Invalid 'clima' data" } } } diff --git a/homeassistant/components/comelit/switch.py b/homeassistant/components/comelit/switch.py index f6e5b192c38..db89bd082f6 100644 --- a/homeassistant/components/comelit/switch.py +++ b/homeassistant/components/comelit/switch.py @@ -14,6 +14,9 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from .coordinator import ComelitConfigEntry, ComelitSerialBridge +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/conversation/__init__.py b/homeassistant/components/conversation/__init__.py index 14c5244c18b..25aaf6df290 100644 --- a/homeassistant/components/conversation/__init__.py +++ b/homeassistant/components/conversation/__init__.py @@ -4,7 +4,6 @@ from __future__ import annotations from collections.abc import Callable import logging -import re from typing import Literal from hassil.recognize import RecognizeResult @@ -91,8 +90,6 @@ __all__ = [ _LOGGER = logging.getLogger(__name__) -REGEX_TYPE = type(re.compile("")) - SERVICE_PROCESS_SCHEMA = vol.Schema( { vol.Required(ATTR_TEXT): cv.string, diff --git a/homeassistant/components/conversation/chat_log.py b/homeassistant/components/conversation/chat_log.py index 355f423dbb6..cb7b8dd22f7 100644 --- a/homeassistant/components/conversation/chat_log.py +++ b/homeassistant/components/conversation/chat_log.py @@ -8,7 +8,7 @@ from contextlib import contextmanager from contextvars import ContextVar from dataclasses import asdict, dataclass, field, replace import logging -from typing import Literal, TypedDict +from typing import Any, Literal, TypedDict import voluptuous as vol @@ -51,8 +51,7 @@ def async_get_chat_log( ) if user_input is not None and ( (content := chat_log.content[-1]).role != "user" - # MyPy doesn't understand that content is a UserContent here - or content.content != user_input.text # type: ignore[union-attr] + or content.content != user_input.text ): chat_log.async_add_user_content(UserContent(content=user_input.text)) @@ -128,7 
+127,7 @@ class ConverseError(HomeAssistantError): class SystemContent: """Base class for chat messages.""" - role: str = field(init=False, default="system") + role: Literal["system"] = field(init=False, default="system") content: str @@ -136,7 +135,7 @@ class SystemContent: class UserContent: """Assistant content.""" - role: str = field(init=False, default="user") + role: Literal["user"] = field(init=False, default="user") content: str @@ -144,7 +143,7 @@ class UserContent: class AssistantContent: """Assistant content.""" - role: str = field(init=False, default="assistant") + role: Literal["assistant"] = field(init=False, default="assistant") agent_id: str content: str | None = None tool_calls: list[llm.ToolInput] | None = None @@ -154,7 +153,7 @@ class AssistantContent: class ToolResultContent: """Tool result content.""" - role: str = field(init=False, default="tool_result") + role: Literal["tool_result"] = field(init=False, default="tool_result") agent_id: str tool_call_id: str tool_name: str @@ -193,8 +192,8 @@ class ChatLog: return ( last_msg.role == "assistant" - and last_msg.content is not None # type: ignore[union-attr] - and last_msg.content.strip().endswith( # type: ignore[union-attr] + and last_msg.content is not None + and last_msg.content.strip().endswith( ( "?", ";", # Greek question mark @@ -457,10 +456,16 @@ class ChatLog: LOGGER.debug("Prompt: %s", self.content) LOGGER.debug("Tools: %s", self.llm_api.tools if self.llm_api else None) - trace.async_conversation_trace_append( - trace.ConversationTraceEventType.AGENT_DETAIL, + self.async_trace( { "messages": self.content, "tools": self.llm_api.tools if self.llm_api else None, - }, + } + ) + + def async_trace(self, agent_details: dict[str, Any]) -> None: + """Append agent specific details to the conversation trace.""" + trace.async_conversation_trace_append( + trace.ConversationTraceEventType.AGENT_DETAIL, + agent_details, ) diff --git a/homeassistant/components/conversation/default_agent.py b/homeassistant/components/conversation/default_agent.py index 3a7aa0c26e8..c30e8bb4a92 100644 --- a/homeassistant/components/conversation/default_agent.py +++ b/homeassistant/components/conversation/default_agent.py @@ -42,7 +42,6 @@ from homeassistant.components.homeassistant.exposed_entities import ( from homeassistant.const import EVENT_STATE_CHANGED, MATCH_ALL from homeassistant.helpers import ( area_registry as ar, - chat_session, device_registry as dr, entity_registry as er, floor_registry as fr, @@ -56,7 +55,7 @@ from homeassistant.helpers.event import async_track_state_added_domain from homeassistant.util import language as language_util from homeassistant.util.json import JsonObjectType, json_loads_object -from .chat_log import AssistantContent, async_get_chat_log +from .chat_log import AssistantContent, ChatLog from .const import ( DATA_DEFAULT_ENTITY, DEFAULT_EXPOSED_ATTRIBUTES, @@ -332,49 +331,46 @@ class DefaultAgent(ConversationEntity): return result - async def async_process(self, user_input: ConversationInput) -> ConversationResult: - """Process a sentence.""" + async def _async_handle_message( + self, + user_input: ConversationInput, + chat_log: ChatLog, + ) -> ConversationResult: + """Handle a message.""" response: intent.IntentResponse | None = None - with ( - chat_session.async_get_chat_session( - self.hass, user_input.conversation_id - ) as session, - async_get_chat_log(self.hass, session, user_input) as chat_log, - ): - # Check if a trigger matched - if trigger_result := await self.async_recognize_sentence_trigger( - 
user_input - ): - # Process callbacks and get response - response_text = await self._handle_trigger_result( - trigger_result, user_input - ) - # Convert to conversation result - response = intent.IntentResponse( - language=user_input.language or self.hass.config.language - ) - response.response_type = intent.IntentResponseType.ACTION_DONE - response.async_set_speech(response_text) - - if response is None: - # Match intents - intent_result = await self.async_recognize_intent(user_input) - response = await self._async_process_intent_result( - intent_result, user_input - ) - - speech: str = response.speech.get("plain", {}).get("speech", "") - chat_log.async_add_assistant_content_without_tools( - AssistantContent( - agent_id=user_input.agent_id, - content=speech, - ) + # Check if a trigger matched + if trigger_result := await self.async_recognize_sentence_trigger(user_input): + # Process callbacks and get response + response_text = await self._handle_trigger_result( + trigger_result, user_input ) - return ConversationResult( - response=response, conversation_id=session.conversation_id + # Convert to conversation result + response = intent.IntentResponse( + language=user_input.language or self.hass.config.language ) + response.response_type = intent.IntentResponseType.ACTION_DONE + response.async_set_speech(response_text) + + if response is None: + # Match intents + intent_result = await self.async_recognize_intent(user_input) + response = await self._async_process_intent_result( + intent_result, user_input + ) + + speech: str = response.speech.get("plain", {}).get("speech", "") + chat_log.async_add_assistant_content_without_tools( + AssistantContent( + agent_id=user_input.agent_id, + content=speech, + ) + ) + + return ConversationResult( + response=response, conversation_id=chat_log.conversation_id + ) async def _async_process_intent_result( self, diff --git a/homeassistant/components/conversation/entity.py b/homeassistant/components/conversation/entity.py index d9598dee7eb..ca4d18ab9f5 100644 --- a/homeassistant/components/conversation/entity.py +++ b/homeassistant/components/conversation/entity.py @@ -4,9 +4,11 @@ from abc import abstractmethod from typing import Literal, final from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN +from homeassistant.helpers.chat_session import async_get_chat_session from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.util import dt as dt_util +from .chat_log import ChatLog, async_get_chat_log from .const import ConversationEntityFeature from .models import ConversationInput, ConversationResult @@ -51,9 +53,21 @@ class ConversationEntity(RestoreEntity): def supported_languages(self) -> list[str] | Literal["*"]: """Return a list of supported languages.""" - @abstractmethod async def async_process(self, user_input: ConversationInput) -> ConversationResult: """Process a sentence.""" + with ( + async_get_chat_session(self.hass, user_input.conversation_id) as session, + async_get_chat_log(self.hass, session, user_input) as chat_log, + ): + return await self._async_handle_message(user_input, chat_log) + + async def _async_handle_message( + self, + user_input: ConversationInput, + chat_log: ChatLog, + ) -> ConversationResult: + """Call the API.""" + raise NotImplementedError async def async_prepare(self, language: str | None = None) -> None: """Load intents for a language.""" diff --git a/homeassistant/components/conversation/http.py b/homeassistant/components/conversation/http.py index 4d8526a4fd4..efcdcb8d69b 100644 --- 
a/homeassistant/components/conversation/http.py +++ b/homeassistant/components/conversation/http.py @@ -3,11 +3,13 @@ from __future__ import annotations from collections.abc import Iterable +from dataclasses import asdict from typing import Any from aiohttp import web from hassil.recognize import MISSING_ENTITY, RecognizeResult from hassil.string_matcher import UnmatchedRangeEntity, UnmatchedTextEntity +from home_assistant_intents import get_language_scores import voluptuous as vol from homeassistant.components import http, websocket_api @@ -38,6 +40,7 @@ def async_setup(hass: HomeAssistant) -> None: websocket_api.async_register_command(hass, websocket_list_agents) websocket_api.async_register_command(hass, websocket_list_sentences) websocket_api.async_register_command(hass, websocket_hass_agent_debug) + websocket_api.async_register_command(hass, websocket_hass_agent_language_scores) @websocket_api.websocket_command( @@ -336,6 +339,36 @@ def _get_unmatched_slots( return unmatched_slots +@websocket_api.websocket_command( + { + vol.Required("type"): "conversation/agent/homeassistant/language_scores", + vol.Optional("language"): str, + vol.Optional("country"): str, + } +) +@websocket_api.async_response +async def websocket_hass_agent_language_scores( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Get support scores per language.""" + language = msg.get("language", hass.config.language) + country = msg.get("country", hass.config.country) + + scores = await hass.async_add_executor_job(get_language_scores) + matching_langs = language_util.matches(language, scores.keys(), country=country) + preferred_lang = matching_langs[0] if matching_langs else language + result = { + "languages": { + lang_key: asdict(lang_scores) for lang_key, lang_scores in scores.items() + }, + "preferred_language": preferred_lang, + } + + connection.send_result(msg["id"], result) + + class ConversationProcessView(http.HomeAssistantView): """View to process text.""" diff --git a/homeassistant/components/conversation/manifest.json b/homeassistant/components/conversation/manifest.json index ea950ace323..acaa2ef0967 100644 --- a/homeassistant/components/conversation/manifest.json +++ b/homeassistant/components/conversation/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/conversation", "integration_type": "system", "quality_scale": "internal", - "requirements": ["hassil==2.2.3", "home-assistant-intents==2025.3.5"] + "requirements": ["hassil==2.2.3", "home-assistant-intents==2025.3.24"] } diff --git a/homeassistant/components/conversation/util.py b/homeassistant/components/conversation/util.py deleted file mode 100644 index 4326c95cb66..00000000000 --- a/homeassistant/components/conversation/util.py +++ /dev/null @@ -1,37 +0,0 @@ -"""Util for Conversation.""" - -from __future__ import annotations - -import re - - -def create_matcher(utterance: str) -> re.Pattern[str]: - """Create a regex that matches the utterance.""" - # Split utterance into parts that are type: NORMAL, GROUP or OPTIONAL - # Pattern matches (GROUP|OPTIONAL): Change light to [the color] {name} - parts = re.split(r"({\w+}|\[[\w\s]+\] *)", utterance) - # Pattern to extract name from GROUP part. Matches {name} - group_matcher = re.compile(r"{(\w+)}") - # Pattern to extract text from OPTIONAL part. 
Matches [the color] - optional_matcher = re.compile(r"\[([\w ]+)\] *") - - pattern = ["^"] - for part in parts: - group_match = group_matcher.match(part) - optional_match = optional_matcher.match(part) - - # Normal part - if group_match is None and optional_match is None: - pattern.append(part) - continue - - # Group part - if group_match is not None: - pattern.append(rf"(?P<{group_match.groups()[0]}>[\w ]+?)\s*") - - # Optional part - elif optional_match is not None: - pattern.append(rf"(?:{optional_match.groups()[0]} *)?") - - pattern.append("$") - return re.compile("".join(pattern), re.IGNORECASE) diff --git a/homeassistant/components/cover/reproduce_state.py b/homeassistant/components/cover/reproduce_state.py index 307fe5f11bd..de3e0cebfb7 100644 --- a/homeassistant/components/cover/reproduce_state.py +++ b/homeassistant/components/cover/reproduce_state.py @@ -3,12 +3,14 @@ from __future__ import annotations import asyncio -from collections.abc import Iterable +from collections.abc import Coroutine, Iterable +from functools import partial import logging -from typing import Any +from typing import Any, Final from homeassistant.const import ( ATTR_ENTITY_ID, + ATTR_SUPPORTED_FEATURES, SERVICE_CLOSE_COVER, SERVICE_CLOSE_COVER_TILT, SERVICE_OPEN_COVER, @@ -16,7 +18,8 @@ from homeassistant.const import ( SERVICE_SET_COVER_POSITION, SERVICE_SET_COVER_TILT_POSITION, ) -from homeassistant.core import Context, HomeAssistant, State +from homeassistant.core import Context, HomeAssistant, ServiceResponse, State +from homeassistant.util.enum import try_parse_enum from . import ( ATTR_CURRENT_POSITION, @@ -24,17 +27,140 @@ from . import ( ATTR_POSITION, ATTR_TILT_POSITION, DOMAIN, + CoverEntityFeature, CoverState, ) _LOGGER = logging.getLogger(__name__) -VALID_STATES = { - CoverState.CLOSED, - CoverState.CLOSING, - CoverState.OPEN, - CoverState.OPENING, -} + +OPENING_STATES = {CoverState.OPENING, CoverState.OPEN} +CLOSING_STATES = {CoverState.CLOSING, CoverState.CLOSED} +VALID_STATES: set[CoverState] = OPENING_STATES | CLOSING_STATES + +FULL_OPEN: Final = 100 +FULL_CLOSE: Final = 0 + + +def _determine_features(current_attrs: dict[str, Any]) -> CoverEntityFeature: + """Determine supported features based on current attributes.""" + features = CoverEntityFeature(0) + if ATTR_CURRENT_POSITION in current_attrs: + features |= ( + CoverEntityFeature.SET_POSITION + | CoverEntityFeature.OPEN + | CoverEntityFeature.CLOSE + ) + if ATTR_CURRENT_TILT_POSITION in current_attrs: + features |= ( + CoverEntityFeature.SET_TILT_POSITION + | CoverEntityFeature.OPEN_TILT + | CoverEntityFeature.CLOSE_TILT + ) + if features == CoverEntityFeature(0): + features = CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE + return features + + +async def _async_set_position( + service_call: partial[Coroutine[Any, Any, ServiceResponse]], + service_data: dict[str, Any], + features: CoverEntityFeature, + target_position: int, +) -> bool: + """Set the position of the cover. + + Returns True if the position was set, False if there is no + supported method for setting the position. 
+ """ + if target_position == FULL_CLOSE and CoverEntityFeature.CLOSE in features: + await service_call(SERVICE_CLOSE_COVER, service_data) + elif target_position == FULL_OPEN and CoverEntityFeature.OPEN in features: + await service_call(SERVICE_OPEN_COVER, service_data) + elif CoverEntityFeature.SET_POSITION in features: + await service_call( + SERVICE_SET_COVER_POSITION, service_data | {ATTR_POSITION: target_position} + ) + else: + # Requested a position but the cover doesn't support it + return False + return True + + +async def _async_set_tilt_position( + service_call: partial[Coroutine[Any, Any, ServiceResponse]], + service_data: dict[str, Any], + features: CoverEntityFeature, + target_tilt_position: int, +) -> bool: + """Set the tilt position of the cover. + + Returns True if the tilt position was set, False if there is no + supported method for setting the tilt position. + """ + if target_tilt_position == FULL_CLOSE and CoverEntityFeature.CLOSE_TILT in features: + await service_call(SERVICE_CLOSE_COVER_TILT, service_data) + elif target_tilt_position == FULL_OPEN and CoverEntityFeature.OPEN_TILT in features: + await service_call(SERVICE_OPEN_COVER_TILT, service_data) + elif CoverEntityFeature.SET_TILT_POSITION in features: + await service_call( + SERVICE_SET_COVER_TILT_POSITION, + service_data | {ATTR_TILT_POSITION: target_tilt_position}, + ) + else: + # Requested a tilt position but the cover doesn't support it + return False + return True + + +async def _async_close_cover( + service_call: partial[Coroutine[Any, Any, ServiceResponse]], + service_data: dict[str, Any], + features: CoverEntityFeature, + set_position: bool, + set_tilt: bool, +) -> None: + """Close the cover if it was not closed by setting the position.""" + if not set_position: + if CoverEntityFeature.CLOSE in features: + await service_call(SERVICE_CLOSE_COVER, service_data) + elif CoverEntityFeature.SET_POSITION in features: + await service_call( + SERVICE_SET_COVER_POSITION, service_data | {ATTR_POSITION: FULL_CLOSE} + ) + if not set_tilt: + if CoverEntityFeature.CLOSE_TILT in features: + await service_call(SERVICE_CLOSE_COVER_TILT, service_data) + elif CoverEntityFeature.SET_TILT_POSITION in features: + await service_call( + SERVICE_SET_COVER_TILT_POSITION, + service_data | {ATTR_TILT_POSITION: FULL_CLOSE}, + ) + + +async def _async_open_cover( + service_call: partial[Coroutine[Any, Any, ServiceResponse]], + service_data: dict[str, Any], + features: CoverEntityFeature, + set_position: bool, + set_tilt: bool, +) -> None: + """Open the cover if it was not opened by setting the position.""" + if not set_position: + if CoverEntityFeature.OPEN in features: + await service_call(SERVICE_OPEN_COVER, service_data) + elif CoverEntityFeature.SET_POSITION in features: + await service_call( + SERVICE_SET_COVER_POSITION, service_data | {ATTR_POSITION: FULL_OPEN} + ) + if not set_tilt: + if CoverEntityFeature.OPEN_TILT in features: + await service_call(SERVICE_OPEN_COVER_TILT, service_data) + elif CoverEntityFeature.SET_TILT_POSITION in features: + await service_call( + SERVICE_SET_COVER_TILT_POSITION, + service_data | {ATTR_TILT_POSITION: FULL_OPEN}, + ) async def _async_reproduce_state( @@ -45,74 +171,72 @@ async def _async_reproduce_state( reproduce_options: dict[str, Any] | None = None, ) -> None: """Reproduce a single state.""" - if (cur_state := hass.states.get(state.entity_id)) is None: - _LOGGER.warning("Unable to find entity %s", state.entity_id) + entity_id = state.entity_id + if (cur_state := hass.states.get(entity_id)) 
is None: + _LOGGER.warning("Unable to find entity %s", entity_id) return - if state.state not in VALID_STATES: - _LOGGER.warning( - "Invalid state specified for %s: %s", state.entity_id, state.state - ) + if (target_state := state.state) not in VALID_STATES: + _LOGGER.warning("Invalid state specified for %s: %s", entity_id, target_state) return + current_attrs = cur_state.attributes + target_attrs = state.attributes + + current_position = current_attrs.get(ATTR_CURRENT_POSITION) + target_position = target_attrs.get(ATTR_CURRENT_POSITION) + position_matches = current_position == target_position + + current_tilt_position = current_attrs.get(ATTR_CURRENT_TILT_POSITION) + target_tilt_position = target_attrs.get(ATTR_CURRENT_TILT_POSITION) + tilt_position_matches = current_tilt_position == target_tilt_position + + state_matches = cur_state.state == target_state # Return if we are already at the right state. - if ( - cur_state.state == state.state - and cur_state.attributes.get(ATTR_CURRENT_POSITION) - == state.attributes.get(ATTR_CURRENT_POSITION) - and cur_state.attributes.get(ATTR_CURRENT_TILT_POSITION) - == state.attributes.get(ATTR_CURRENT_TILT_POSITION) - ): + if state_matches and position_matches and tilt_position_matches: return - service_data = {ATTR_ENTITY_ID: state.entity_id} - service_data_tilting = {ATTR_ENTITY_ID: state.entity_id} + features = try_parse_enum( + CoverEntityFeature, current_attrs.get(ATTR_SUPPORTED_FEATURES) + ) + if features is None: + # Backwards compatibility for integrations that + # don't set supported features since it previously + # worked without it. + _LOGGER.warning("Supported features is not set for %s", entity_id) + features = _determine_features(current_attrs) - if not ( - cur_state.state == state.state - and cur_state.attributes.get(ATTR_CURRENT_POSITION) - == state.attributes.get(ATTR_CURRENT_POSITION) - ): - # Open/Close - if state.state in [CoverState.CLOSED, CoverState.CLOSING]: - service = SERVICE_CLOSE_COVER - elif state.state in [CoverState.OPEN, CoverState.OPENING]: - if ( - ATTR_CURRENT_POSITION in cur_state.attributes - and ATTR_CURRENT_POSITION in state.attributes - ): - service = SERVICE_SET_COVER_POSITION - service_data[ATTR_POSITION] = state.attributes[ATTR_CURRENT_POSITION] - else: - service = SERVICE_OPEN_COVER + service_call = partial( + hass.services.async_call, + DOMAIN, + context=context, + blocking=True, + ) + service_data = {ATTR_ENTITY_ID: entity_id} - await hass.services.async_call( - DOMAIN, service, service_data, context=context, blocking=True + set_position = ( + not position_matches + and target_position is not None + and await _async_set_position( + service_call, service_data, features, target_position + ) + ) + set_tilt = ( + not tilt_position_matches + and target_tilt_position is not None + and await _async_set_tilt_position( + service_call, service_data, features, target_tilt_position + ) + ) + + if target_state in CLOSING_STATES: + await _async_close_cover( + service_call, service_data, features, set_position, set_tilt ) - if ( - ATTR_CURRENT_TILT_POSITION in state.attributes - and ATTR_CURRENT_TILT_POSITION in cur_state.attributes - and cur_state.attributes.get(ATTR_CURRENT_TILT_POSITION) - != state.attributes.get(ATTR_CURRENT_TILT_POSITION) - ): - # Tilt position - if state.attributes.get(ATTR_CURRENT_TILT_POSITION) == 100: - service_tilting = SERVICE_OPEN_COVER_TILT - elif state.attributes.get(ATTR_CURRENT_TILT_POSITION) == 0: - service_tilting = SERVICE_CLOSE_COVER_TILT - else: - service_tilting = 
SERVICE_SET_COVER_TILT_POSITION - service_data_tilting[ATTR_TILT_POSITION] = state.attributes[ - ATTR_CURRENT_TILT_POSITION - ] - - await hass.services.async_call( - DOMAIN, - service_tilting, - service_data_tilting, - context=context, - blocking=True, + elif target_state in OPENING_STATES: + await _async_open_cover( + service_call, service_data, features, set_position, set_tilt ) diff --git a/homeassistant/components/daikin/manifest.json b/homeassistant/components/daikin/manifest.json index f794d97a9ba..86fc804ec92 100644 --- a/homeassistant/components/daikin/manifest.json +++ b/homeassistant/components/daikin/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/daikin", "iot_class": "local_polling", "loggers": ["pydaikin"], - "requirements": ["pydaikin==2.13.8"], + "requirements": ["pydaikin==2.14.1"], "zeroconf": ["_dkapi._tcp.local."] } diff --git a/homeassistant/components/deconz/manifest.json b/homeassistant/components/deconz/manifest.json index 93ae8e392c8..5664e6abc8a 100644 --- a/homeassistant/components/deconz/manifest.json +++ b/homeassistant/components/deconz/manifest.json @@ -7,7 +7,7 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["pydeconz"], - "requirements": ["pydeconz==118"], + "requirements": ["pydeconz==120"], "ssdp": [ { "manufacturer": "Royal Philips Electronics", diff --git a/homeassistant/components/deluge/config_flow.py b/homeassistant/components/deluge/config_flow.py index 19afe26e8f9..78eced64c7c 100644 --- a/homeassistant/components/deluge/config_flow.py +++ b/homeassistant/components/deluge/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Mapping +import logging from ssl import SSLError from typing import Any @@ -21,6 +22,8 @@ from .const import ( DOMAIN, ) +_LOGGER = logging.getLogger(__name__) + class DelugeFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a config flow for Deluge.""" @@ -86,7 +89,8 @@ class DelugeFlowHandler(ConfigFlow, domain=DOMAIN): await self.hass.async_add_executor_job(api.connect) except (ConnectionRefusedError, TimeoutError, SSLError): return "cannot_connect" - except Exception as ex: # noqa: BLE001 + except Exception as ex: + _LOGGER.exception("Unexpected error") if type(ex).__name__ == "BadLoginError": return "invalid_auth" return "unknown" diff --git a/homeassistant/components/dexcom/config_flow.py b/homeassistant/components/dexcom/config_flow.py index 90917e0ce2c..ed6dc94e764 100644 --- a/homeassistant/components/dexcom/config_flow.py +++ b/homeassistant/components/dexcom/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +import logging from typing import Any from pydexcom import AccountError, Dexcom, SessionError @@ -12,6 +13,8 @@ from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from .const import CONF_SERVER, DOMAIN, SERVER_OUS, SERVER_US +_LOGGER = logging.getLogger(__name__) + DATA_SCHEMA = vol.Schema( { vol.Required(CONF_USERNAME): str, @@ -43,7 +46,8 @@ class DexcomConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" except AccountError: errors["base"] = "invalid_auth" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected error") errors["base"] = "unknown" if "base" not in errors: diff --git a/homeassistant/components/discovergy/__init__.py b/homeassistant/components/discovergy/__init__.py index 9cf63176de6..0a8b7422f84 100644 --- a/homeassistant/components/discovergy/__init__.py +++ b/homeassistant/components/discovergy/__init__.py @@ -9,7 
+9,7 @@ import pydiscovergy.error as discovergyError from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers.httpx_client import get_async_client +from homeassistant.helpers.httpx_client import create_async_httpx_client from .coordinator import DiscovergyConfigEntry, DiscovergyUpdateCoordinator @@ -21,7 +21,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: DiscovergyConfigEntry) - client = Discovergy( email=entry.data[CONF_EMAIL], password=entry.data[CONF_PASSWORD], - httpx_client=get_async_client(hass), + httpx_client=create_async_httpx_client(hass), authentication=BasicAuth(), ) diff --git a/homeassistant/components/dormakaba_dkey/config_flow.py b/homeassistant/components/dormakaba_dkey/config_flow.py index 0d23b822231..369accb83d8 100644 --- a/homeassistant/components/dormakaba_dkey/config_flow.py +++ b/homeassistant/components/dormakaba_dkey/config_flow.py @@ -57,7 +57,7 @@ class DormkabaConfigFlow(ConfigFlow, domain=DOMAIN): self._discovery_info = self._discovered_devices[address] return await self.async_step_associate() - current_addresses = self._async_current_ids() + current_addresses = self._async_current_ids(include_ignore=False) for discovery in async_discovered_service_info(self.hass): if ( discovery.address in current_addresses diff --git a/homeassistant/components/duke_energy/coordinator.py b/homeassistant/components/duke_energy/coordinator.py index 12a2f5fd6ae..a76168475c0 100644 --- a/homeassistant/components/duke_energy/coordinator.py +++ b/homeassistant/components/duke_energy/coordinator.py @@ -8,7 +8,11 @@ from aiodukeenergy import DukeEnergy from aiohttp import ClientError from homeassistant.components.recorder import get_instance -from homeassistant.components.recorder.models import StatisticData, StatisticMetaData +from homeassistant.components.recorder.models import ( + StatisticData, + StatisticMeanType, + StatisticMetaData, +) from homeassistant.components.recorder.statistics import ( async_add_external_statistics, get_last_statistics, @@ -137,7 +141,7 @@ class DukeEnergyCoordinator(DataUpdateCoordinator[None]): f"Duke Energy {meter['serviceType'].capitalize()} {serial_number}" ) consumption_metadata = StatisticMetaData( - has_mean=False, + mean_type=StatisticMeanType.NONE, has_sum=True, name=f"{name_prefix} Consumption", source=DOMAIN, diff --git a/homeassistant/components/dynalite/strings.json b/homeassistant/components/dynalite/strings.json index 468cdebf0b1..4f73f91113b 100644 --- a/homeassistant/components/dynalite/strings.json +++ b/homeassistant/components/dynalite/strings.json @@ -36,7 +36,7 @@ }, "request_channel_level": { "name": "Request channel level", - "description": "Requests Dynalite to report the level of a specific channel.", + "description": "Requests Dynalite to report the brightness level of a specific channel.", "fields": { "host": { "name": "[%key:common::config_flow::data::host%]", @@ -48,7 +48,7 @@ }, "channel": { "name": "Channel", - "description": "Channel to request the level for." + "description": "Channel to request the brightness level for." 
} } } diff --git a/homeassistant/components/ecobee/strings.json b/homeassistant/components/ecobee/strings.json index 2b44c45edef..078643ee789 100644 --- a/homeassistant/components/ecobee/strings.json +++ b/homeassistant/components/ecobee/strings.json @@ -25,7 +25,7 @@ "state_attributes": { "preset_mode": { "state": { - "away_indefinitely": "Away Indefinitely" + "away_indefinitely": "Away indefinitely" } } } @@ -91,7 +91,7 @@ }, "fan_min_on_time": { "name": "Fan minimum on time", - "description": "Minimum number of minutes to run the fan each hour (0 to 60) during the vacation." + "description": "Minimum number of minutes to run the fan each hour during the vacation." } } }, @@ -125,7 +125,7 @@ }, "set_fan_min_on_time": { "name": "Set fan minimum on time", - "description": "Sets the minimum fan on time.", + "description": "Sets the minimum amount of time that the fan will run per hour.", "fields": { "entity_id": { "name": "Entity", @@ -133,7 +133,7 @@ }, "fan_min_on_time": { "name": "[%key:component::ecobee::services::create_vacation::fields::fan_min_on_time::name%]", - "description": "New value of fan min on time." + "description": "Minimum number of minutes to run the fan each hour." } } }, diff --git a/homeassistant/components/ecoforest/sensor.py b/homeassistant/components/ecoforest/sensor.py index c1d4aca6f0c..d0e4c17abe1 100644 --- a/homeassistant/components/ecoforest/sensor.py +++ b/homeassistant/components/ecoforest/sensor.py @@ -132,7 +132,7 @@ SENSOR_TYPES: tuple[EcoforestSensorEntityDescription, ...] = ( ), EcoforestSensorEntityDescription( key="convecto_air_flow", - translation_key="convecto_air_flow", + translation_key="convector_air_flow", native_unit_of_measurement=PERCENTAGE, entity_registry_enabled_default=False, value_fn=lambda data: data.convecto_air_flow, diff --git a/homeassistant/components/ecoforest/strings.json b/homeassistant/components/ecoforest/strings.json index 1094e10ada3..d0e807b5f2a 100644 --- a/homeassistant/components/ecoforest/strings.json +++ b/homeassistant/components/ecoforest/strings.json @@ -78,8 +78,8 @@ "extractor": { "name": "Extractor" }, - "convecto_air_flow": { - "name": "Convecto air flow" + "convector_air_flow": { + "name": "Convector air flow" } }, "number": { diff --git a/homeassistant/components/ecovacs/manifest.json b/homeassistant/components/ecovacs/manifest.json index 6d3dc5c9be6..acb5b620719 100644 --- a/homeassistant/components/ecovacs/manifest.json +++ b/homeassistant/components/ecovacs/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/ecovacs", "iot_class": "cloud_push", "loggers": ["sleekxmppfs", "sucks", "deebot_client"], - "requirements": ["py-sucks==0.9.10", "deebot-client==12.3.1"] + "requirements": ["py-sucks==0.9.10", "deebot-client==12.4.0"] } diff --git a/homeassistant/components/ecowitt/binary_sensor.py b/homeassistant/components/ecowitt/binary_sensor.py index a2ed279f601..1d36f5232db 100644 --- a/homeassistant/components/ecowitt/binary_sensor.py +++ b/homeassistant/components/ecowitt/binary_sensor.py @@ -26,6 +26,9 @@ ECOWITT_BINARYSENSORS_MAPPING: Final = { device_class=BinarySensorDeviceClass.BATTERY, entity_category=EntityCategory.DIAGNOSTIC, ), + EcoWittSensorTypes.RAIN_STATE: BinarySensorEntityDescription( + key="RAIN_STATE", device_class=BinarySensorDeviceClass.MOISTURE + ), } diff --git a/homeassistant/components/ecowitt/sensor.py b/homeassistant/components/ecowitt/sensor.py index 6968acdfa4f..7d37aa40b86 100644 --- a/homeassistant/components/ecowitt/sensor.py +++ 
b/homeassistant/components/ecowitt/sensor.py @@ -68,6 +68,7 @@ ECOWITT_SENSORS_MAPPING: Final = { key="DEGREE", native_unit_of_measurement=DEGREE, device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, ), EcoWittSensorTypes.WATT_METERS_SQUARED: SensorEntityDescription( key="WATT_METERS_SQUARED", diff --git a/homeassistant/components/eheimdigital/config_flow.py b/homeassistant/components/eheimdigital/config_flow.py index c6535608b0c..b0432267c8e 100644 --- a/homeassistant/components/eheimdigital/config_flow.py +++ b/homeassistant/components/eheimdigital/config_flow.py @@ -62,6 +62,7 @@ class EheimDigitalConfigFlow(ConfigFlow, domain=DOMAIN): except (ClientError, TimeoutError): return self.async_abort(reason="cannot_connect") except Exception: # noqa: BLE001 + LOGGER.exception("Unknown exception occurred") return self.async_abort(reason="unknown") await self.async_set_unique_id(hub.main.mac_address) self._abort_if_unique_id_configured(updates={CONF_HOST: host}) diff --git a/homeassistant/components/elkm1/__init__.py b/homeassistant/components/elkm1/__init__.py index 5286b7ad66f..4bf51b99de1 100644 --- a/homeassistant/components/elkm1/__init__.py +++ b/homeassistant/components/elkm1/__init__.py @@ -101,9 +101,11 @@ def hostname_from_url(url: str) -> str: def _host_validator(config: dict[str, str]) -> dict[str, str]: """Validate that a host is properly configured.""" - if config[CONF_HOST].startswith("elks://"): + if config[CONF_HOST].startswith(("elks://", "elksv1_2://")): if CONF_USERNAME not in config or CONF_PASSWORD not in config: - raise vol.Invalid("Specify username and password for elks://") + raise vol.Invalid( + "Specify username and password for elks:// or elksv1_2://" + ) elif not config[CONF_HOST].startswith("elk://") and not config[ CONF_HOST ].startswith("serial://"): diff --git a/homeassistant/components/elvia/importer.py b/homeassistant/components/elvia/importer.py index 4e8b7f716ef..caca787237c 100644 --- a/homeassistant/components/elvia/importer.py +++ b/homeassistant/components/elvia/importer.py @@ -7,7 +7,11 @@ from typing import TYPE_CHECKING, cast from elvia import Elvia, error as ElviaError -from homeassistant.components.recorder.models import StatisticData, StatisticMetaData +from homeassistant.components.recorder.models import ( + StatisticData, + StatisticMeanType, + StatisticMetaData, +) from homeassistant.components.recorder.statistics import ( async_add_external_statistics, get_last_statistics, @@ -144,7 +148,7 @@ class ElviaImporter: async_add_external_statistics( hass=self.hass, metadata=StatisticMetaData( - has_mean=False, + mean_type=StatisticMeanType.NONE, has_sum=True, name=f"{self.metering_point_id} Consumption", source=DOMAIN, diff --git a/homeassistant/components/energenie_power_sockets/strings.json b/homeassistant/components/energenie_power_sockets/strings.json index 4e4e49c68fb..bd536568d2c 100644 --- a/homeassistant/components/energenie_power_sockets/strings.json +++ b/homeassistant/components/energenie_power_sockets/strings.json @@ -1,5 +1,5 @@ { - "title": "Energenie Power Sockets Integration.", + "title": "Energenie Power Sockets", "config": { "step": { "user": { diff --git a/homeassistant/components/energy/data.py b/homeassistant/components/energy/data.py index ff86177cf41..442aedf23b0 100644 --- a/homeassistant/components/energy/data.py +++ b/homeassistant/components/energy/data.py @@ -139,6 +139,10 @@ class DeviceConsumption(TypedDict): # An optional custom name for display in energy graphs name: str | 
None + # An optional statistic_id identifying a device + # that includes this device's consumption in its total + included_in_stat: str | None + class EnergyPreferences(TypedDict): """Dictionary holding the energy data.""" @@ -291,6 +295,7 @@ DEVICE_CONSUMPTION_SCHEMA = vol.Schema( { vol.Required("stat_consumption"): str, vol.Optional("name"): str, + vol.Optional("included_in_stat"): str, } ) diff --git a/homeassistant/components/energy/strings.json b/homeassistant/components/energy/strings.json index e9d72247319..5eb2c93161e 100644 --- a/homeassistant/components/energy/strings.json +++ b/homeassistant/components/energy/strings.json @@ -7,7 +7,7 @@ }, "recorder_untracked": { "title": "Entity not tracked", - "description": "The recorder has been configured to exclude these configured entities:" + "description": "Home Assistant Recorder has been configured to exclude these configured entities:" }, "entity_unavailable": { "title": "Entity unavailable", diff --git a/homeassistant/components/energyzero/strings.json b/homeassistant/components/energyzero/strings.json index 7788f4d4d8e..48682ab31ee 100644 --- a/homeassistant/components/energyzero/strings.json +++ b/homeassistant/components/energyzero/strings.json @@ -54,10 +54,10 @@ "services": { "get_gas_prices": { "name": "Get gas prices", - "description": "Request gas prices from EnergyZero.", + "description": "Requests gas prices from EnergyZero.", "fields": { "config_entry": { - "name": "Config Entry", + "name": "Config entry", "description": "The config entry to use for this action." }, "incl_vat": { @@ -76,7 +76,7 @@ }, "get_energy_prices": { "name": "Get energy prices", - "description": "Request energy prices from EnergyZero.", + "description": "Requests energy prices from EnergyZero.", "fields": { "config_entry": { "name": "[%key:component::energyzero::services::get_gas_prices::fields::config_entry::name%]", diff --git a/homeassistant/components/enigma2/config_flow.py b/homeassistant/components/enigma2/config_flow.py index b0649a8368d..876d55128cf 100644 --- a/homeassistant/components/enigma2/config_flow.py +++ b/homeassistant/components/enigma2/config_flow.py @@ -1,5 +1,6 @@ """Config flow for Enigma2.""" +import logging from typing import Any, cast from aiohttp.client_exceptions import ClientError @@ -63,6 +64,8 @@ CONFIG_SCHEMA = vol.Schema( } ) +_LOGGER = logging.getLogger(__name__) + async def get_options_schema(handler: SchemaCommonFlowHandler) -> vol.Schema: """Get the options schema.""" @@ -130,7 +133,8 @@ class Enigma2ConfigFlowHandler(ConfigFlow, domain=DOMAIN): errors = {"base": "invalid_auth"} except ClientError: errors = {"base": "cannot_connect"} - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors = {"base": "unknown"} else: unique_id = about["info"]["ifaces"][0]["mac"] or self.unique_id diff --git a/homeassistant/components/enphase_envoy/strings.json b/homeassistant/components/enphase_envoy/strings.json index b498c59e0d3..ce3a8593226 100644 --- a/homeassistant/components/enphase_envoy/strings.json +++ b/homeassistant/components/enphase_envoy/strings.json @@ -187,13 +187,13 @@ "name": "Lifetime energy consumption {phase_name}" }, "balanced_net_consumption": { - "name": "balanced net power consumption" + "name": "Balanced net power consumption" }, "lifetime_balanced_net_consumption": { "name": "Lifetime balanced net energy consumption" }, "balanced_net_consumption_phase": { - "name": "balanced net power consumption {phase_name}" + "name": "Balanced net power consumption 
{phase_name}" }, "lifetime_balanced_net_consumption_phase": { "name": "Lifetime balanced net energy consumption {phase_name}" @@ -217,7 +217,7 @@ "name": "Net consumption CT current" }, "net_ct_powerfactor": { - "name": "Powerfactor net consumption CT" + "name": "Power factor net consumption CT" }, "net_ct_metering_status": { "name": "Metering status net consumption CT" @@ -235,7 +235,7 @@ "name": "Production CT current" }, "production_ct_powerfactor": { - "name": "powerfactor production CT" + "name": "Power factor production CT" }, "production_ct_metering_status": { "name": "Metering status production CT" @@ -262,7 +262,7 @@ "name": "Storage CT current" }, "storage_ct_powerfactor": { - "name": "Powerfactor storage CT" + "name": "Power factor storage CT" }, "storage_ct_metering_status": { "name": "Metering status storage CT" @@ -289,7 +289,7 @@ "name": "Net consumption CT current {phase_name}" }, "net_ct_powerfactor_phase": { - "name": "Powerfactor net consumption CT {phase_name}" + "name": "Power factor net consumption CT {phase_name}" }, "net_ct_metering_status_phase": { "name": "Metering status net consumption CT {phase_name}" @@ -307,7 +307,7 @@ "name": "Production CT current {phase_name}" }, "production_ct_powerfactor_phase": { - "name": "Powerfactor production CT {phase_name}" + "name": "Power factor production CT {phase_name}" }, "production_ct_metering_status_phase": { "name": "Metering status production CT {phase_name}" @@ -334,7 +334,7 @@ "name": "Storage CT current {phase_name}" }, "storage_ct_powerfactor_phase": { - "name": "Powerfactor storage CT {phase_name}" + "name": "Power factor storage CT {phase_name}" }, "storage_ct_metering_status_phase": { "name": "Metering status storage CT {phase_name}" diff --git a/homeassistant/components/environment_canada/sensor.py b/homeassistant/components/environment_canada/sensor.py index 3a789289c74..1685888d2bc 100644 --- a/homeassistant/components/environment_canada/sensor.py +++ b/homeassistant/components/environment_canada/sensor.py @@ -168,6 +168,7 @@ SENSOR_TYPES: tuple[ECSensorEntityDescription, ...] 
= ( native_unit_of_measurement=DEGREE, value_fn=lambda data: data.conditions.get("wind_bearing", {}).get("value"), device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, ), ECSensorEntityDescription( key="wind_chill", diff --git a/homeassistant/components/eq3btsmart/manifest.json b/homeassistant/components/eq3btsmart/manifest.json index 4b65852d205..ab62c962982 100644 --- a/homeassistant/components/eq3btsmart/manifest.json +++ b/homeassistant/components/eq3btsmart/manifest.json @@ -22,5 +22,5 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["eq3btsmart"], - "requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.11.0"] + "requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.12.0"] } diff --git a/homeassistant/components/esphome/assist_satellite.py b/homeassistant/components/esphome/assist_satellite.py index fdd16d20d77..a129a7723dd 100644 --- a/homeassistant/components/esphome/assist_satellite.py +++ b/homeassistant/components/esphome/assist_satellite.py @@ -253,6 +253,11 @@ class EsphomeAssistSatellite( # Will use media player for TTS/announcements self._update_tts_format() + if feature_flags & VoiceAssistantFeature.START_CONVERSATION: + self._attr_supported_features |= ( + assist_satellite.AssistSatelliteEntityFeature.START_CONVERSATION + ) + # Update wake word select when config is updated self.async_on_remove( self.entry_data.async_register_assist_satellite_set_wake_word_callback( @@ -342,14 +347,33 @@ class EsphomeAssistSatellite( Should block until the announcement is done playing. """ + await self._do_announce(announcement, run_pipeline_after=False) + + async def async_start_conversation( + self, start_announcement: assist_satellite.AssistSatelliteAnnouncement + ) -> None: + """Start a conversation from the satellite.""" + await self._do_announce(start_announcement, run_pipeline_after=True) + + async def _do_announce( + self, + announcement: assist_satellite.AssistSatelliteAnnouncement, + run_pipeline_after: bool, + ) -> None: + """Announce media on the satellite. + + Optionally run a voice pipeline after the announcement has finished. 
+ """ _LOGGER.debug( "Waiting for announcement to finished (message=%s, media_id=%s)", announcement.message, announcement.media_id, ) media_id = announcement.media_id - if announcement.media_id_source != "tts": - # Route non-TTS media through the proxy + is_media_tts = announcement.media_id_source == "tts" + preannounce_media_id = announcement.preannounce_media_id + if (not is_media_tts) or preannounce_media_id: + # Route media through the proxy format_to_use: MediaPlayerSupportedFormat | None = None for supported_format in chain( *self.entry_data.media_player_formats.values() @@ -362,19 +386,33 @@ class EsphomeAssistSatellite( assert (self.registry_entry is not None) and ( self.registry_entry.device_id is not None ) - proxy_url = async_create_proxy_url( - self.hass, - self.registry_entry.device_id, - media_id, + + make_proxy_url = partial( + async_create_proxy_url, + hass=self.hass, + device_id=self.registry_entry.device_id, media_format=format_to_use.format, rate=format_to_use.sample_rate or None, channels=format_to_use.num_channels or None, width=format_to_use.sample_bytes or None, ) - media_id = async_process_play_media_url(self.hass, proxy_url) + + if not is_media_tts: + media_id = async_process_play_media_url( + self.hass, make_proxy_url(media_url=media_id) + ) + + if preannounce_media_id: + preannounce_media_id = async_process_play_media_url( + self.hass, make_proxy_url(media_url=preannounce_media_id) + ) await self.cli.send_voice_assistant_announcement_await_response( - media_id, _ANNOUNCEMENT_TIMEOUT_SEC, announcement.message + media_id, + _ANNOUNCEMENT_TIMEOUT_SEC, + announcement.message, + start_conversation=run_pipeline_after, + preannounce_media_id=preannounce_media_id or "", ) async def handle_pipeline_start( diff --git a/homeassistant/components/esphome/manifest.json b/homeassistant/components/esphome/manifest.json index 25d9e407044..075185dffbb 100644 --- a/homeassistant/components/esphome/manifest.json +++ b/homeassistant/components/esphome/manifest.json @@ -16,9 +16,9 @@ "loggers": ["aioesphomeapi", "noiseprotocol", "bleak_esphome"], "mqtt": ["esphome/discover/#"], "requirements": [ - "aioesphomeapi==29.4.1", + "aioesphomeapi==29.7.0", "esphome-dashboard-api==1.2.3", - "bleak-esphome==2.11.0" + "bleak-esphome==2.12.0" ], "zeroconf": ["_esphomelib._tcp.local."] } diff --git a/homeassistant/components/esphome/strings.json b/homeassistant/components/esphome/strings.json index 1534a49e365..c6916a3636d 100644 --- a/homeassistant/components/esphome/strings.json +++ b/homeassistant/components/esphome/strings.json @@ -4,7 +4,7 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", - "mdns_missing_mac": "Missing MAC address in MDNS properties.", + "mdns_missing_mac": "Missing MAC address in mDNS properties.", "service_received": "Action received", "mqtt_missing_mac": "Missing MAC address in MQTT properties.", "mqtt_missing_api": "Missing API port in MQTT properties.", diff --git a/homeassistant/components/everlights/light.py b/homeassistant/components/everlights/light.py index ae159d77240..c153f01e83c 100644 --- a/homeassistant/components/everlights/light.py +++ b/homeassistant/components/everlights/light.py @@ -4,7 +4,7 @@ from __future__ import annotations from datetime import timedelta import logging -from typing import Any +from typing import Any, cast import pyeverlights import 
voluptuous as vol @@ -84,7 +84,7 @@ class EverLightsLight(LightEntity): api: pyeverlights.EverLights, channel: int, status: dict[str, Any], - effects, + effects: list[str], ) -> None: """Initialize the light.""" self._api = api @@ -106,8 +106,10 @@ class EverLightsLight(LightEntity): async def async_turn_on(self, **kwargs: Any) -> None: """Turn the light on.""" - hs_color = kwargs.get(ATTR_HS_COLOR, self._attr_hs_color) - brightness = kwargs.get(ATTR_BRIGHTNESS, self._attr_brightness) + hs_color = cast( + tuple[float, float], kwargs.get(ATTR_HS_COLOR, self._attr_hs_color) + ) + brightness = cast(int, kwargs.get(ATTR_BRIGHTNESS, self._attr_brightness)) effect = kwargs.get(ATTR_EFFECT) if effect is not None: @@ -116,7 +118,7 @@ class EverLightsLight(LightEntity): rgb = color_int_to_rgb(colors[0]) hsv = color_util.color_RGB_to_hsv(*rgb) hs_color = hsv[:2] - brightness = hsv[2] / 100 * 255 + brightness = round(hsv[2] / 100 * 255) else: rgb = color_util.color_hsv_to_RGB( diff --git a/homeassistant/components/evohome/manifest.json b/homeassistant/components/evohome/manifest.json index 700872ef92b..44e4cdb1128 100644 --- a/homeassistant/components/evohome/manifest.json +++ b/homeassistant/components/evohome/manifest.json @@ -6,5 +6,5 @@ "iot_class": "cloud_polling", "loggers": ["evohome", "evohomeasync", "evohomeasync2"], "quality_scale": "legacy", - "requirements": ["evohome-async==1.0.3"] + "requirements": ["evohome-async==1.0.4"] } diff --git a/homeassistant/components/ezviz/strings.json b/homeassistant/components/ezviz/strings.json index f1653661cdd..cd8bbc9d199 100644 --- a/homeassistant/components/ezviz/strings.json +++ b/homeassistant/components/ezviz/strings.json @@ -54,7 +54,7 @@ "init": { "data": { "timeout": "Request timeout (seconds)", - "ffmpeg_arguments": "Arguments passed to ffmpeg for cameras" + "ffmpeg_arguments": "Arguments passed to FFmpeg for cameras" } } } diff --git a/homeassistant/components/feedreader/strings.json b/homeassistant/components/feedreader/strings.json index 3132aadbda8..35022e82bb1 100644 --- a/homeassistant/components/feedreader/strings.json +++ b/homeassistant/components/feedreader/strings.json @@ -36,7 +36,7 @@ "issues": { "import_yaml_error_url_error": { "title": "The Feedreader YAML configuration import failed", - "description": "Configuring the Feedreader using YAML is being removed but there was a connection error when trying to import the YAML configuration for `{url}`.\n\nPlease verify that url is reachable and accessible for Home Assistant and restart Home Assistant to try again or remove the Feedreader YAML configuration from your configuration.yaml file and continue to set up the integration manually." + "description": "Configuring the Feedreader using YAML is being removed but there was a connection error when trying to import the YAML configuration for `{url}`.\n\nPlease verify that the URL is reachable and accessible for Home Assistant and restart Home Assistant to try again or remove the Feedreader YAML configuration from your configuration.yaml file and continue to set up the integration manually." 
} } } diff --git a/homeassistant/components/ffmpeg/strings.json b/homeassistant/components/ffmpeg/strings.json index 66c1f19de5b..cac7fcfc48c 100644 --- a/homeassistant/components/ffmpeg/strings.json +++ b/homeassistant/components/ffmpeg/strings.json @@ -2,7 +2,7 @@ "services": { "restart": { "name": "[%key:common::action::restart%]", - "description": "Sends a restart command to a ffmpeg based sensor.", + "description": "Sends a restart command to an FFmpeg-based sensor.", "fields": { "entity_id": { "name": "Entity", @@ -12,7 +12,7 @@ }, "start": { "name": "[%key:common::action::start%]", - "description": "Sends a start command to a ffmpeg based sensor.", + "description": "Sends a start command to an FFmpeg-based sensor.", "fields": { "entity_id": { "name": "Entity", @@ -22,7 +22,7 @@ }, "stop": { "name": "[%key:common::action::stop%]", - "description": "Sends a stop command to a ffmpeg based sensor.", + "description": "Sends a stop command to an FFmpeg-based sensor.", "fields": { "entity_id": { "name": "Entity", diff --git a/homeassistant/components/fints/sensor.py b/homeassistant/components/fints/sensor.py index 318325dbb09..f5188d5bf21 100644 --- a/homeassistant/components/fints/sensor.py +++ b/homeassistant/components/fints/sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections import namedtuple from datetime import timedelta import logging -from typing import Any +from typing import Any, cast from fints.client import FinTS3PinTanClient from fints.models import SEPAAccount @@ -73,7 +73,7 @@ def setup_platform( credentials = BankCredentials( config[CONF_BIN], config[CONF_USERNAME], config[CONF_PIN], config[CONF_URL] ) - fints_name = config.get(CONF_NAME, config[CONF_BIN]) + fints_name = cast(str, config.get(CONF_NAME, config[CONF_BIN])) account_config = { acc[CONF_ACCOUNT]: acc[CONF_NAME] for acc in config[CONF_ACCOUNTS] diff --git a/homeassistant/components/flo/strings.json b/homeassistant/components/flo/strings.json index 3444911fbd4..64e22bedec3 100644 --- a/homeassistant/components/flo/strings.json +++ b/homeassistant/components/flo/strings.json @@ -60,11 +60,11 @@ "fields": { "sleep_minutes": { "name": "Sleep minutes", - "description": "The time to sleep in minutes." + "description": "The duration to sleep in minutes." }, "revert_to_mode": { "name": "Revert to mode", - "description": "The mode to revert to after sleep_minutes has elapsed." + "description": "The mode to revert to after the 'Sleep minutes' duration has elapsed." } } }, @@ -78,7 +78,7 @@ }, "run_health_test": { "name": "Run health test", - "description": "Have the Flo device run a health test." + "description": "Requests the Flo device to run a health test." } } } diff --git a/homeassistant/components/fritzbox/strings.json b/homeassistant/components/fritzbox/strings.json index c7c2439b566..e0df30875bc 100644 --- a/homeassistant/components/fritzbox/strings.json +++ b/homeassistant/components/fritzbox/strings.json @@ -89,7 +89,7 @@ "message": "Can't change preset while holiday or summer mode is active on the device." }, "change_hvac_while_active_mode": { - "message": "Can't change hvac mode while holiday or summer mode is active on the device." + "message": "Can't change HVAC mode while holiday or summer mode is active on the device." 
} } } diff --git a/homeassistant/components/fronius/config_flow.py b/homeassistant/components/fronius/config_flow.py index f35c9ce5bc1..b8aa2da81c6 100644 --- a/homeassistant/components/fronius/config_flow.py +++ b/homeassistant/components/fronius/config_flow.py @@ -149,7 +149,7 @@ class FroniusConfigFlow(ConfigFlow, domain=DOMAIN): unique_id, info = await validate_host(self.hass, user_input[CONF_HOST]) except CannotConnect: errors["base"] = "cannot_connect" - except Exception: # pylint: disable=broad-except + except Exception: _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: diff --git a/homeassistant/components/frontend/__init__.py b/homeassistant/components/frontend/__init__.py index 6184d888004..9a0627f9f42 100644 --- a/homeassistant/components/frontend/__init__.py +++ b/homeassistant/components/frontend/__init__.py @@ -52,10 +52,9 @@ CONF_JS_VERSION = "javascript_version" DEFAULT_THEME_COLOR = "#03A9F4" -DATA_PANELS = "frontend_panels" -DATA_JS_VERSION = "frontend_js_version" -DATA_EXTRA_MODULE_URL = "frontend_extra_module_url" -DATA_EXTRA_JS_URL_ES5 = "frontend_extra_js_url_es5" +DATA_PANELS: HassKey[dict[str, Panel]] = HassKey("frontend_panels") +DATA_EXTRA_MODULE_URL: HassKey[UrlManager] = HassKey("frontend_extra_module_url") +DATA_EXTRA_JS_URL_ES5: HassKey[UrlManager] = HassKey("frontend_extra_js_url_es5") DATA_WS_SUBSCRIBERS: HassKey[set[tuple[websocket_api.ActiveConnection, int]]] = HassKey( "frontend_ws_subscribers" @@ -64,8 +63,8 @@ DATA_WS_SUBSCRIBERS: HassKey[set[tuple[websocket_api.ActiveConnection, int]]] = THEMES_STORAGE_KEY = f"{DOMAIN}_theme" THEMES_STORAGE_VERSION = 1 THEMES_SAVE_DELAY = 60 -DATA_THEMES_STORE = "frontend_themes_store" -DATA_THEMES = "frontend_themes" +DATA_THEMES_STORE: HassKey[Store] = HassKey("frontend_themes_store") +DATA_THEMES: HassKey[dict[str, Any]] = HassKey("frontend_themes") DATA_DEFAULT_THEME = "frontend_default_theme" DATA_DEFAULT_DARK_THEME = "frontend_default_dark_theme" DEFAULT_THEME = "default" @@ -242,7 +241,7 @@ class Panel: sidebar_title: str | None = None # Url to show the panel in the frontend - frontend_url_path: str | None = None + frontend_url_path: str # Config to pass to the webcomponent config: dict[str, Any] | None = None @@ -273,7 +272,7 @@ class Panel: self.config_panel_domain = config_panel_domain @callback - def to_response(self) -> PanelRespons: + def to_response(self) -> PanelResponse: """Panel as dictionary.""" return { "component_name": self.component_name, @@ -631,7 +630,8 @@ class IndexView(web_urldispatcher.AbstractResource): def get_info(self) -> dict[str, list[str]]: # type: ignore[override] """Return a dict with additional info useful for introspection.""" - return {"panels": list(self.hass.data[DATA_PANELS])} + panels = self.hass.data[DATA_PANELS] + return {"panels": list(panels)} def raw_match(self, path: str) -> bool: """Perform a raw match against path.""" @@ -841,13 +841,13 @@ def websocket_subscribe_extra_js( connection.send_message(websocket_api.result_message(msg["id"])) -class PanelRespons(TypedDict): +class PanelResponse(TypedDict): """Represent the panel response type.""" component_name: str icon: str | None title: str | None config: dict[str, Any] | None - url_path: str | None + url_path: str require_admin: bool config_panel_domain: str | None diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index b210fdb6661..b78323488ae 100644 --- a/homeassistant/components/frontend/manifest.json +++ 
b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20250306.0"] + "requirements": ["home-assistant-frontend==20250326.0"] } diff --git a/homeassistant/components/frontend/storage.py b/homeassistant/components/frontend/storage.py index cbcc3024aa7..a33a9de7ac5 100644 --- a/homeassistant/components/frontend/storage.py +++ b/homeassistant/components/frontend/storage.py @@ -12,8 +12,11 @@ from homeassistant.components import websocket_api from homeassistant.components.websocket_api import ActiveConnection from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.storage import Store +from homeassistant.util.hass_dict import HassKey -DATA_STORAGE = "frontend_storage" +DATA_STORAGE: HassKey[tuple[dict[str, Store], dict[str, dict]]] = HassKey( + "frontend_storage" +) STORAGE_VERSION_USER_DATA = 1 diff --git a/homeassistant/components/frontier_silicon/config_flow.py b/homeassistant/components/frontier_silicon/config_flow.py index f6514da28ff..dc4f6bea989 100644 --- a/homeassistant/components/frontier_silicon/config_flow.py +++ b/homeassistant/components/frontier_silicon/config_flow.py @@ -108,8 +108,8 @@ class FrontierSiliconConfigFlow(ConfigFlow, domain=DOMAIN): self._webfsapi_url = await AFSAPI.get_webfsapi_endpoint(device_url) except FSConnectionError: return self.async_abort(reason="cannot_connect") - except Exception as exception: # noqa: BLE001 - _LOGGER.debug(exception) + except Exception: + _LOGGER.exception("Unexpected exception") return self.async_abort(reason="unknown") # try to login with default pin diff --git a/homeassistant/components/fujitsu_fglair/config_flow.py b/homeassistant/components/fujitsu_fglair/config_flow.py index c4b097ff0de..9369fd7b7cd 100644 --- a/homeassistant/components/fujitsu_fglair/config_flow.py +++ b/homeassistant/components/fujitsu_fglair/config_flow.py @@ -62,7 +62,7 @@ class FGLairConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" except AylaAuthError: errors["base"] = "invalid_auth" - except Exception: # pylint: disable=broad-except + except Exception: _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" diff --git a/homeassistant/components/fujitsu_fglair/manifest.json b/homeassistant/components/fujitsu_fglair/manifest.json index 330685f89fc..c8fed9b45c9 100644 --- a/homeassistant/components/fujitsu_fglair/manifest.json +++ b/homeassistant/components/fujitsu_fglair/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/fujitsu_fglair", "iot_class": "cloud_polling", - "requirements": ["ayla-iot-unofficial==1.4.5"] + "requirements": ["ayla-iot-unofficial==1.4.7"] } diff --git a/homeassistant/components/fujitsu_fglair/sensor.py b/homeassistant/components/fujitsu_fglair/sensor.py index 0ad5bec3117..3bb693e1068 100644 --- a/homeassistant/components/fujitsu_fglair/sensor.py +++ b/homeassistant/components/fujitsu_fglair/sensor.py @@ -24,6 +24,7 @@ async def async_setup_entry( async_add_entities( FGLairOutsideTemperature(entry.runtime_data, device) for device in entry.runtime_data.data.values() + if device.outdoor_temperature is not None ) diff --git a/homeassistant/components/fyta/config_flow.py b/homeassistant/components/fyta/config_flow.py index 78cb7647785..9c5ab1de405 100644 --- a/homeassistant/components/fyta/config_flow.py +++ 
b/homeassistant/components/fyta/config_flow.py @@ -65,8 +65,8 @@ class FytaConfigFlow(ConfigFlow, domain=DOMAIN): return {"base": "invalid_auth"} except FytaPasswordError: return {"base": "invalid_auth", CONF_PASSWORD: "password_error"} - except Exception as e: # noqa: BLE001 - _LOGGER.error(e) + except Exception: + _LOGGER.exception("Unexpected exception") return {"base": "unknown"} finally: await fyta.client.close() diff --git a/homeassistant/components/fyta/manifest.json b/homeassistant/components/fyta/manifest.json index ea628f55c6c..615197203a8 100644 --- a/homeassistant/components/fyta/manifest.json +++ b/homeassistant/components/fyta/manifest.json @@ -9,5 +9,5 @@ "iot_class": "cloud_polling", "loggers": ["fyta_cli"], "quality_scale": "platinum", - "requirements": ["fyta_cli==0.7.0"] + "requirements": ["fyta_cli==0.7.2"] } diff --git a/homeassistant/components/gardena_bluetooth/config_flow.py b/homeassistant/components/gardena_bluetooth/config_flow.py index c7631b62f47..613d0cf21db 100644 --- a/homeassistant/components/gardena_bluetooth/config_flow.py +++ b/homeassistant/components/gardena_bluetooth/config_flow.py @@ -41,6 +41,8 @@ def _is_supported(discovery_info: BluetoothServiceInfo): ProductType.PUMP, ProductType.VALVE, ProductType.WATER_COMPUTER, + ProductType.AUTOMATS, + ProductType.PRESSURE_TANKS, ): _LOGGER.debug("Unsupported device: %s", manufacturer_data) return False diff --git a/homeassistant/components/gardena_bluetooth/manifest.json b/homeassistant/components/gardena_bluetooth/manifest.json index 28bba1015f5..8c9cda7d998 100644 --- a/homeassistant/components/gardena_bluetooth/manifest.json +++ b/homeassistant/components/gardena_bluetooth/manifest.json @@ -14,5 +14,5 @@ "documentation": "https://www.home-assistant.io/integrations/gardena_bluetooth", "iot_class": "local_polling", "loggers": ["bleak", "bleak_esphome", "gardena_bluetooth"], - "requirements": ["gardena-bluetooth==1.5.0"] + "requirements": ["gardena-bluetooth==1.6.0"] } diff --git a/homeassistant/components/gios/__init__.py b/homeassistant/components/gios/__init__.py index f756980f5d0..31f704fcacc 100644 --- a/homeassistant/components/gios/__init__.py +++ b/homeassistant/components/gios/__init__.py @@ -44,7 +44,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: GiosConfigEntry) -> bool try: gios = await Gios.create(websession, station_id) except (GiosError, ConnectionError, ClientConnectorError) as err: - raise ConfigEntryNotReady from err + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="cannot_connect", + translation_placeholders={ + "entry": entry.title, + "error": repr(err), + }, + ) from err coordinator = GiosDataUpdateCoordinator(hass, entry, gios) await coordinator.async_config_entry_first_refresh() diff --git a/homeassistant/components/gios/config_flow.py b/homeassistant/components/gios/config_flow.py index ecd0baee6f9..9b242a8cc99 100644 --- a/homeassistant/components/gios/config_flow.py +++ b/homeassistant/components/gios/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations import asyncio -from typing import Any +from typing import TYPE_CHECKING, Any from aiohttp.client_exceptions import ClientConnectorError from gios import ApiError, Gios, InvalidSensorsDataError, NoStationError @@ -12,6 +12,12 @@ import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_NAME from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.selector import ( 
+ SelectOptionDict, + SelectSelector, + SelectSelectorConfig, + SelectSelectorMode, +) from .const import API_TIMEOUT, CONF_STATION_ID, DOMAIN @@ -27,40 +33,59 @@ class GiosFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a flow initialized by the user.""" errors = {} + websession = async_get_clientsession(self.hass) + if user_input is not None: + station_id = user_input[CONF_STATION_ID] + try: - await self.async_set_unique_id( - str(user_input[CONF_STATION_ID]), raise_on_progress=False - ) + await self.async_set_unique_id(station_id, raise_on_progress=False) self._abort_if_unique_id_configured() - websession = async_get_clientsession(self.hass) - async with asyncio.timeout(API_TIMEOUT): - gios = await Gios.create(websession, user_input[CONF_STATION_ID]) + gios = await Gios.create(websession, int(station_id)) await gios.async_update() - assert gios.station_name is not None + # GIOS treats station ID as int + user_input[CONF_STATION_ID] = int(station_id) + + if TYPE_CHECKING: + assert gios.station_name is not None + return self.async_create_entry( title=gios.station_name, data=user_input, ) except (ApiError, ClientConnectorError, TimeoutError): errors["base"] = "cannot_connect" - except NoStationError: - errors[CONF_STATION_ID] = "wrong_station_id" except InvalidSensorsDataError: errors[CONF_STATION_ID] = "invalid_sensors_data" + try: + gios = await Gios.create(websession) + except (ApiError, ClientConnectorError, NoStationError): + return self.async_abort(reason="cannot_connect") + + options: list[SelectOptionDict] = [ + SelectOptionDict(value=str(station.id), label=station.name) + for station in gios.measurement_stations.values() + ] + + schema: vol.Schema = vol.Schema( + { + vol.Required(CONF_STATION_ID): SelectSelector( + SelectSelectorConfig( + options=options, + sort=True, + mode=SelectSelectorMode.DROPDOWN, + ), + ), + vol.Optional(CONF_NAME, default=self.hass.config.location_name): str, + } + ) + return self.async_show_form( step_id="user", - data_schema=vol.Schema( - { - vol.Required(CONF_STATION_ID): int, - vol.Optional( - CONF_NAME, default=self.hass.config.location_name - ): str, - } - ), + data_schema=schema, errors=errors, ) diff --git a/homeassistant/components/gios/coordinator.py b/homeassistant/components/gios/coordinator.py index 95f3b8af797..eb0dd82eb67 100644 --- a/homeassistant/components/gios/coordinator.py +++ b/homeassistant/components/gios/coordinator.py @@ -57,4 +57,11 @@ class GiosDataUpdateCoordinator(DataUpdateCoordinator[GiosSensors]): async with asyncio.timeout(API_TIMEOUT): return await self.gios.async_update() except (GiosError, ClientConnectorError) as error: - raise UpdateFailed(error) from error + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error", + translation_placeholders={ + "entry": self.config_entry.title, + "error": repr(error), + }, + ) from error diff --git a/homeassistant/components/gios/strings.json b/homeassistant/components/gios/strings.json index fc82f1c843d..eca23159a13 100644 --- a/homeassistant/components/gios/strings.json +++ b/homeassistant/components/gios/strings.json @@ -5,17 +5,17 @@ "title": "GIO\u015a (Polish Chief Inspectorate Of Environmental Protection)", "data": { "name": "[%key:common::config_flow::data::name%]", - "station_id": "ID of the measuring station" + "station_id": "Measuring station" } } }, "error": { - "wrong_station_id": "ID of the measuring station is not correct.", "invalid_sensors_data": "Invalid sensors' data for this measuring station.", "cannot_connect": 
"[%key:common::config_flow::error::cannot_connect%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_location%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_location%]", + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" } }, "system_health": { @@ -170,5 +170,13 @@ } } } + }, + "exceptions": { + "cannot_connect": { + "message": "An error occurred while connecting to the GIOS API for {entry}: {error}" + }, + "update_error": { + "message": "An error occurred while retrieving data from the GIOS API for {entry}: {error}" + } } } diff --git a/homeassistant/components/go2rtc/const.py b/homeassistant/components/go2rtc/const.py index 234411936cb..491b2269043 100644 --- a/homeassistant/components/go2rtc/const.py +++ b/homeassistant/components/go2rtc/const.py @@ -6,4 +6,4 @@ CONF_DEBUG_UI = "debug_ui" DEBUG_UI_URL_MESSAGE = "Url and debug_ui cannot be set at the same time." HA_MANAGED_API_PORT = 11984 HA_MANAGED_URL = f"http://localhost:{HA_MANAGED_API_PORT}/" -RECOMMENDED_VERSION = "1.9.8" +RECOMMENDED_VERSION = "1.9.9" diff --git a/homeassistant/components/gogogate2/config_flow.py b/homeassistant/components/gogogate2/config_flow.py index 0348d0b428c..cebff656d5d 100644 --- a/homeassistant/components/gogogate2/config_flow.py +++ b/homeassistant/components/gogogate2/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations import dataclasses +import logging import re from typing import Any, Self @@ -27,6 +28,8 @@ from homeassistant.helpers.service_info.zeroconf import ( from .common import get_api from .const import DEVICE_TYPE_GOGOGATE2, DEVICE_TYPE_ISMARTGATE, DOMAIN +_LOGGER = logging.getLogger(__name__) + DEVICE_NAMES = { DEVICE_TYPE_GOGOGATE2: "Gogogate2", DEVICE_TYPE_ISMARTGATE: "ismartgate", @@ -115,7 +118,8 @@ class Gogogate2FlowHandler(ConfigFlow, domain=DOMAIN): else: errors["base"] = "cannot_connect" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "cannot_connect" if self._ip_address and self._device_type: diff --git a/homeassistant/components/google/calendar.py b/homeassistant/components/google/calendar.py index 4f8ffba1d19..a62d2bf1d6b 100644 --- a/homeassistant/components/google/calendar.py +++ b/homeassistant/components/google/calendar.py @@ -89,6 +89,7 @@ OPAQUE = "opaque" RRULE_PREFIX = "RRULE:" SERVICE_CREATE_EVENT = "create_event" +FILTERED_EVENT_TYPES = [EventTypeEnum.BIRTHDAY, EventTypeEnum.WORKING_LOCATION] @dataclasses.dataclass(frozen=True, kw_only=True) @@ -103,7 +104,7 @@ class GoogleCalendarEntityDescription(CalendarEntityDescription): search: str | None local_sync: bool device_id: str - working_location: bool = False + event_type: EventTypeEnum | None = None def _get_entity_descriptions( @@ -173,14 +174,24 @@ def _get_entity_descriptions( local_sync, ) if calendar_item.primary and local_sync: - _LOGGER.debug("work location entity") + # Create a separate calendar for birthdays + entity_descriptions.append( + dataclasses.replace( + entity_description, + key=f"{key}-birthdays", + translation_key="birthdays", + event_type=EventTypeEnum.BIRTHDAY, + name=None, + entity_id=None, + ) + ) # Create an optional disabled by default entity for Work Location entity_descriptions.append( dataclasses.replace( entity_description, key=f"{key}-work-location", translation_key="working_location", - working_location=True, + event_type=EventTypeEnum.WORKING_LOCATION, name=None, entity_id=None, 
entity_registry_enabled_default=False, @@ -383,9 +394,18 @@ class GoogleCalendarEntity( for attendee in event.attendees ): return False - - if event.event_type == EventTypeEnum.WORKING_LOCATION: - return self.entity_description.working_location + # Calendar entity may be limited to a specific event type + if ( + self.entity_description.event_type is not None + and self.entity_description.event_type != event.event_type + ): + return False + # Default calendar entity omits the special types but includes all the others + if ( + self.entity_description.event_type is None + and event.event_type in FILTERED_EVENT_TYPES + ): + return False if self._ignore_availability: return True return event.transparency == OPAQUE diff --git a/homeassistant/components/google/manifest.json b/homeassistant/components/google/manifest.json index bd04597e513..81fd2b07de4 100644 --- a/homeassistant/components/google/manifest.json +++ b/homeassistant/components/google/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/google", "iot_class": "cloud_polling", "loggers": ["googleapiclient"], - "requirements": ["gcal-sync==7.0.0", "oauth2client==4.1.3", "ical==8.3.0"] + "requirements": ["gcal-sync==7.0.0", "oauth2client==4.1.3", "ical==9.0.1"] } diff --git a/homeassistant/components/google/strings.json b/homeassistant/components/google/strings.json index 5ee0cdd9c14..5776fd0480b 100644 --- a/homeassistant/components/google/strings.json +++ b/homeassistant/components/google/strings.json @@ -131,6 +131,9 @@ "calendar": { "working_location": { "name": "Working location" + }, + "birthdays": { + "name": "Birthdays" } } } diff --git a/homeassistant/components/google_assistant/const.py b/homeassistant/components/google_assistant/const.py index 8132ecaae2c..71738c9d13e 100644 --- a/homeassistant/components/google_assistant/const.py +++ b/homeassistant/components/google_assistant/const.py @@ -14,6 +14,7 @@ from homeassistant.components import ( input_boolean, input_button, input_select, + lawn_mower, light, lock, media_player, @@ -58,6 +59,7 @@ DEFAULT_EXPOSED_DOMAINS = [ "humidifier", "input_boolean", "input_select", + "lawn_mower", "light", "lock", "media_player", @@ -88,6 +90,7 @@ TYPE_GATE = f"{PREFIX_TYPES}GATE" TYPE_HUMIDIFIER = f"{PREFIX_TYPES}HUMIDIFIER" TYPE_LIGHT = f"{PREFIX_TYPES}LIGHT" TYPE_LOCK = f"{PREFIX_TYPES}LOCK" +TYPE_MOWER = f"{PREFIX_TYPES}MOWER" TYPE_OUTLET = f"{PREFIX_TYPES}OUTLET" TYPE_RECEIVER = f"{PREFIX_TYPES}AUDIO_VIDEO_RECEIVER" TYPE_SCENE = f"{PREFIX_TYPES}SCENE" @@ -149,6 +152,7 @@ DOMAIN_TO_GOOGLE_TYPES = { input_boolean.DOMAIN: TYPE_SWITCH, input_button.DOMAIN: TYPE_SCENE, input_select.DOMAIN: TYPE_SENSOR, + lawn_mower.DOMAIN: TYPE_MOWER, light.DOMAIN: TYPE_LIGHT, lock.DOMAIN: TYPE_LOCK, media_player.DOMAIN: TYPE_SETTOP, diff --git a/homeassistant/components/google_assistant/trait.py b/homeassistant/components/google_assistant/trait.py index 44251a3be04..9edd340d7d9 100644 --- a/homeassistant/components/google_assistant/trait.py +++ b/homeassistant/components/google_assistant/trait.py @@ -21,6 +21,7 @@ from homeassistant.components import ( input_boolean, input_button, input_select, + lawn_mower, light, lock, media_player, @@ -42,6 +43,7 @@ from homeassistant.components.climate import ClimateEntityFeature from homeassistant.components.cover import CoverEntityFeature from homeassistant.components.fan import FanEntityFeature from homeassistant.components.humidifier import HumidifierEntityFeature +from homeassistant.components.lawn_mower import
LawnMowerEntityFeature from homeassistant.components.light import LightEntityFeature from homeassistant.components.lock import LockState from homeassistant.components.media_player import MediaPlayerEntityFeature, MediaType @@ -714,7 +716,7 @@ class DockTrait(_Trait): @staticmethod def supported(domain, features, device_class, _): """Test if state is supported.""" - return domain == vacuum.DOMAIN + return domain in (vacuum.DOMAIN, lawn_mower.DOMAIN) def sync_attributes(self) -> dict[str, Any]: """Return dock attributes for a sync request.""" @@ -722,17 +724,32 @@ class DockTrait(_Trait): def query_attributes(self) -> dict[str, Any]: """Return dock query attributes.""" - return {"isDocked": self.state.state == vacuum.VacuumActivity.DOCKED} + domain = self.state.domain + state = self.state.state + if domain == vacuum.DOMAIN: + return {"isDocked": state == vacuum.VacuumActivity.DOCKED} + if domain == lawn_mower.DOMAIN: + return {"isDocked": state == lawn_mower.LawnMowerActivity.DOCKED} + raise NotImplementedError(f"Unsupported domain {domain}") async def execute(self, command, data, params, challenge): """Execute a dock command.""" - await self.hass.services.async_call( - self.state.domain, - vacuum.SERVICE_RETURN_TO_BASE, - {ATTR_ENTITY_ID: self.state.entity_id}, - blocking=not self.config.should_report_state, - context=data.context, - ) + domain = self.state.domain + service: str | None = None + + if domain == vacuum.DOMAIN: + service = vacuum.SERVICE_RETURN_TO_BASE + elif domain == lawn_mower.DOMAIN: + service = lawn_mower.SERVICE_DOCK + + if service: + await self.hass.services.async_call( + self.state.domain, + service, + {ATTR_ENTITY_ID: self.state.entity_id}, + blocking=not self.config.should_report_state, + context=data.context, + ) @register_trait @@ -843,7 +860,7 @@ class StartStopTrait(_Trait): @staticmethod def supported(domain, features, device_class, _): """Test if state is supported.""" - if domain == vacuum.DOMAIN: + if domain in (vacuum.DOMAIN, lawn_mower.DOMAIN): return True if ( @@ -863,6 +880,12 @@ class StartStopTrait(_Trait): & VacuumEntityFeature.PAUSE != 0 } + if domain == lawn_mower.DOMAIN: + return { + "pausable": self.state.attributes.get(ATTR_SUPPORTED_FEATURES, 0) + & LawnMowerEntityFeature.PAUSE + != 0 + } if domain in COVER_VALVE_DOMAINS: return {} @@ -878,6 +901,11 @@ class StartStopTrait(_Trait): "isRunning": state == vacuum.VacuumActivity.CLEANING, "isPaused": state == vacuum.VacuumActivity.PAUSED, } + if domain == lawn_mower.DOMAIN: + return { + "isRunning": state == lawn_mower.LawnMowerActivity.MOWING, + "isPaused": state == lawn_mower.LawnMowerActivity.PAUSED, + } if domain in COVER_VALVE_DOMAINS: return { @@ -896,46 +924,52 @@ class StartStopTrait(_Trait): if domain == vacuum.DOMAIN: await self._execute_vacuum(command, data, params, challenge) return + if domain == lawn_mower.DOMAIN: + await self._execute_lawn_mower(command, data, params, challenge) + return if domain in COVER_VALVE_DOMAINS: await self._execute_cover_or_valve(command, data, params, challenge) return async def _execute_vacuum(self, command, data, params, challenge): """Execute a StartStop command.""" + service: str | None = None if command == COMMAND_START_STOP: - if params["start"]: - await self.hass.services.async_call( - self.state.domain, - vacuum.SERVICE_START, - {ATTR_ENTITY_ID: self.state.entity_id}, - blocking=not self.config.should_report_state, - context=data.context, - ) - else: - await self.hass.services.async_call( - self.state.domain, - vacuum.SERVICE_STOP, - {ATTR_ENTITY_ID: 
self.state.entity_id}, - blocking=not self.config.should_report_state, - context=data.context, - ) + service = vacuum.SERVICE_START if params["start"] else vacuum.SERVICE_STOP elif command == COMMAND_PAUSE_UNPAUSE: - if params["pause"]: - await self.hass.services.async_call( - self.state.domain, - vacuum.SERVICE_PAUSE, - {ATTR_ENTITY_ID: self.state.entity_id}, - blocking=not self.config.should_report_state, - context=data.context, - ) - else: - await self.hass.services.async_call( - self.state.domain, - vacuum.SERVICE_START, - {ATTR_ENTITY_ID: self.state.entity_id}, - blocking=not self.config.should_report_state, - context=data.context, - ) + service = vacuum.SERVICE_PAUSE if params["pause"] else vacuum.SERVICE_START + if service: + await self.hass.services.async_call( + self.state.domain, + service, + {ATTR_ENTITY_ID: self.state.entity_id}, + blocking=not self.config.should_report_state, + context=data.context, + ) + + async def _execute_lawn_mower(self, command, data, params, challenge): + """Execute a StartStop command.""" + service: str | None = None + if command == COMMAND_START_STOP: + service = ( + lawn_mower.SERVICE_START_MOWING + if params["start"] + else lawn_mower.SERVICE_DOCK + ) + elif command == COMMAND_PAUSE_UNPAUSE: + service = ( + lawn_mower.SERVICE_PAUSE + if params["pause"] + else lawn_mower.SERVICE_START_MOWING + ) + if service: + await self.hass.services.async_call( + self.state.domain, + service, + {ATTR_ENTITY_ID: self.state.entity_id}, + blocking=not self.config.should_report_state, + context=data.context, + ) async def _execute_cover_or_valve(self, command, data, params, challenge): """Execute a StartStop command.""" diff --git a/homeassistant/components/google_assistant_sdk/manifest.json b/homeassistant/components/google_assistant_sdk/manifest.json index 85469a464b3..70e93f39f42 100644 --- a/homeassistant/components/google_assistant_sdk/manifest.json +++ b/homeassistant/components/google_assistant_sdk/manifest.json @@ -7,6 +7,6 @@ "documentation": "https://www.home-assistant.io/integrations/google_assistant_sdk", "integration_type": "service", "iot_class": "cloud_polling", - "requirements": ["gassist-text==0.0.11"], + "requirements": ["gassist-text==0.0.12"], "single_config_entry": true } diff --git a/homeassistant/components/google_cloud/stt.py b/homeassistant/components/google_cloud/stt.py index 41c5a6710b7..cd5055383ea 100644 --- a/homeassistant/components/google_cloud/stt.py +++ b/homeassistant/components/google_cloud/stt.py @@ -6,6 +6,7 @@ from collections.abc import AsyncGenerator, AsyncIterable import logging from google.api_core.exceptions import GoogleAPIError, Unauthenticated +from google.api_core.retry import AsyncRetry from google.cloud import speech_v1 from homeassistant.components.stt import ( @@ -127,6 +128,7 @@ class GoogleCloudSpeechToTextEntity(SpeechToTextEntity): responses = await self._client.streaming_recognize( requests=request_generator(), timeout=10, + retry=AsyncRetry(initial=0.1, maximum=2.0, multiplier=2.0), ) transcript = "" diff --git a/homeassistant/components/google_cloud/tts.py b/homeassistant/components/google_cloud/tts.py index 1f5f838b593..16519645dee 100644 --- a/homeassistant/components/google_cloud/tts.py +++ b/homeassistant/components/google_cloud/tts.py @@ -7,6 +7,7 @@ from pathlib import Path from typing import Any, cast from google.api_core.exceptions import GoogleAPIError, Unauthenticated +from google.api_core.retry import AsyncRetry from google.cloud import texttospeech import voluptuous as vol @@ -215,7 +216,11 @@ 
class BaseGoogleCloudProvider: ), ) - response = await self._client.synthesize_speech(request, timeout=10) + response = await self._client.synthesize_speech( + request, + timeout=10, + retry=AsyncRetry(initial=0.1, maximum=2.0, multiplier=2.0), + ) if encoding == texttospeech.AudioEncoding.MP3: extension = "mp3" diff --git a/homeassistant/components/google_generative_ai_conversation/__init__.py b/homeassistant/components/google_generative_ai_conversation/__init__.py index 6b10565e0b5..c32d7b5ddea 100644 --- a/homeassistant/components/google_generative_ai_conversation/__init__.py +++ b/homeassistant/components/google_generative_ai_conversation/__init__.py @@ -2,6 +2,7 @@ from __future__ import annotations +import mimetypes from pathlib import Path from google import genai # type: ignore[attr-defined] @@ -83,7 +84,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: ) if not Path(filename).exists(): raise HomeAssistantError(f"`{filename}` does not exist") - prompt_parts.append(client.files.upload(file=filename)) + mimetype = mimetypes.guess_type(filename)[0] + with open(filename, "rb") as file: + uploaded_file = client.files.upload( + file=file, config={"mime_type": mimetype} + ) + prompt_parts.append(uploaded_file) await hass.async_add_executor_job(append_files_to_prompt) diff --git a/homeassistant/components/google_generative_ai_conversation/config_flow.py b/homeassistant/components/google_generative_ai_conversation/config_flow.py index 00a016143f4..b413f9c9a62 100644 --- a/homeassistant/components/google_generative_ai_conversation/config_flow.py +++ b/homeassistant/components/google_generative_ai_conversation/config_flow.py @@ -44,6 +44,7 @@ from .const import ( CONF_TEMPERATURE, CONF_TOP_K, CONF_TOP_P, + CONF_USE_GOOGLE_SEARCH_TOOL, DOMAIN, RECOMMENDED_CHAT_MODEL, RECOMMENDED_HARM_BLOCK_THRESHOLD, @@ -51,6 +52,7 @@ from .const import ( RECOMMENDED_TEMPERATURE, RECOMMENDED_TOP_K, RECOMMENDED_TOP_P, + RECOMMENDED_USE_GOOGLE_SEARCH_TOOL, TIMEOUT_MILLIS, ) @@ -341,6 +343,13 @@ async def google_generative_ai_config_option_schema( }, default=RECOMMENDED_HARM_BLOCK_THRESHOLD, ): harm_block_thresholds_selector, + vol.Optional( + CONF_USE_GOOGLE_SEARCH_TOOL, + description={ + "suggested_value": options.get(CONF_USE_GOOGLE_SEARCH_TOOL), + }, + default=RECOMMENDED_USE_GOOGLE_SEARCH_TOOL, + ): bool, } ) return schema diff --git a/homeassistant/components/google_generative_ai_conversation/const.py b/homeassistant/components/google_generative_ai_conversation/const.py index 35834f6e7f9..108ffe1891d 100644 --- a/homeassistant/components/google_generative_ai_conversation/const.py +++ b/homeassistant/components/google_generative_ai_conversation/const.py @@ -22,5 +22,7 @@ CONF_HATE_BLOCK_THRESHOLD = "hate_block_threshold" CONF_SEXUAL_BLOCK_THRESHOLD = "sexual_block_threshold" CONF_DANGEROUS_BLOCK_THRESHOLD = "dangerous_block_threshold" RECOMMENDED_HARM_BLOCK_THRESHOLD = "BLOCK_MEDIUM_AND_ABOVE" +CONF_USE_GOOGLE_SEARCH_TOOL = "enable_google_search_tool" +RECOMMENDED_USE_GOOGLE_SEARCH_TOOL = False TIMEOUT_MILLIS = 10000 diff --git a/homeassistant/components/google_generative_ai_conversation/conversation.py b/homeassistant/components/google_generative_ai_conversation/conversation.py index b43558c6768..5460f48f20e 100644 --- a/homeassistant/components/google_generative_ai_conversation/conversation.py +++ b/homeassistant/components/google_generative_ai_conversation/conversation.py @@ -4,6 +4,7 @@ from __future__ import annotations import codecs from collections.abc import Callable +from 
dataclasses import replace
 from typing import Any, Literal, cast
 
 from google.genai.errors import APIError
@@ -12,6 +13,7 @@ from google.genai.types import (
     Content,
     FunctionDeclaration,
     GenerateContentConfig,
+    GoogleSearch,
     HarmCategory,
     Part,
     SafetySetting,
@@ -25,7 +27,7 @@
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
-from homeassistant.helpers import chat_session, device_registry as dr, intent, llm
+from homeassistant.helpers import device_registry as dr, intent, llm
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 
 from .const import (
@@ -39,6 +41,7 @@ from .const import (
     CONF_TEMPERATURE,
     CONF_TOP_K,
     CONF_TOP_P,
+    CONF_USE_GOOGLE_SEARCH_TOOL,
     DOMAIN,
     LOGGER,
     RECOMMENDED_CHAT_MODEL,
@@ -168,17 +171,25 @@ def _escape_decode(value: Any) -> Any:
     return value
 
 
+def _create_google_tool_response_parts(
+    parts: list[conversation.ToolResultContent],
+) -> list[Part]:
+    """Create Google tool response parts."""
+    return [
+        Part.from_function_response(
+            name=tool_result.tool_name, response=tool_result.tool_result
+        )
+        for tool_result in parts
+    ]
+
+
 def _create_google_tool_response_content(
     content: list[conversation.ToolResultContent],
 ) -> Content:
     """Create a Google tool response content."""
     return Content(
-        parts=[
-            Part.from_function_response(
-                name=tool_result.tool_name, response=tool_result.tool_result
-            )
-            for tool_result in content
-        ]
+        role="user",
+        parts=_create_google_tool_response_parts(content),
     )
 
 
@@ -188,7 +199,7 @@ def _convert_content(
     | conversation.SystemContent,
 ) -> Content:
     """Convert HA content to Google content."""
-    if content.role != "assistant" or not content.tool_calls:  # type: ignore[union-attr]
+    if content.role != "assistant" or not content.tool_calls:
         role = "model" if content.role == "assistant" else content.role
         return Content(
             role=role,
@@ -264,17 +275,12 @@ class GoogleGenerativeAIConversationEntity(
         conversation.async_unset_agent(self.hass, self.entry)
         await super().async_will_remove_from_hass()
 
-    async def async_process(
-        self, user_input: conversation.ConversationInput
-    ) -> conversation.ConversationResult:
-        """Process a sentence."""
-        with (
-            chat_session.async_get_chat_session(
-                self.hass, user_input.conversation_id
-            ) as session,
-            conversation.async_get_chat_log(self.hass, session, user_input) as chat_log,
-        ):
-            return await self._async_handle_message(user_input, chat_log)
+    def _fix_tool_name(self, tool_name: str) -> str:
+        """Fix tool name if needed."""
+        # The Gemini 2.0+ tokenizer seemingly has an issue with the HassListAddItem tool
+        # name. This makes sure that when it incorrectly changes the name, we change it
+        # back for HA to call.
+        return tool_name if tool_name != "HasListAddItem" else "HassListAddItem"
 
     async def _async_handle_message(
         self,
@@ -301,6 +307,13 @@ class GoogleGenerativeAIConversationEntity(
                 for tool in chat_log.llm_api.tools
             ]
 
+        # Using search grounding allows the model to retrieve information from the web;
+        # however, it may interfere with how the model decides to use some tools or entities.
+        # For example, a weather entity may be disregarded if the model chooses to Google it.
+        if options.get(CONF_USE_GOOGLE_SEARCH_TOOL) is True:
+            tools = tools or []
+            tools.append(Tool(google_search=GoogleSearch()))
+
         model_name = self.entry.options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
         # Gemini 1.0 doesn't support system_instruction while 1.5 does.
         # Assume future versions will support it (if not, the request fails with a
@@ -326,24 +339,22 @@
         for chat_content in chat_log.content[1:-1]:
             if chat_content.role == "tool_result":
-                # mypy doesn't like picking a type based on checking shared property 'role'
-                tool_results.append(cast(conversation.ToolResultContent, chat_content))
+                tool_results.append(chat_content)
                 continue
 
+            if (
+                not isinstance(chat_content, conversation.ToolResultContent)
+                and chat_content.content == ""
+            ):
+                # Skipping is not possible since the number of function calls needs to match the number of function responses,
+                # and skipping one would mean removing the other, which would prevent a proper chat log
+                chat_content = replace(chat_content, content=" ")
+
             if tool_results:
                 messages.append(_create_google_tool_response_content(tool_results))
                 tool_results.clear()
 
-            messages.append(
-                _convert_content(
-                    cast(
-                        conversation.UserContent
-                        | conversation.SystemContent
-                        | conversation.AssistantContent,
-                        chat_content,
-                    )
-                )
-            )
+            messages.append(_convert_content(chat_content))
 
         if tool_results:
             messages.append(_create_google_tool_response_content(tool_results))
@@ -399,7 +410,7 @@ class GoogleGenerativeAIConversationEntity(
         chat = self._genai_client.aio.chats.create(
             model=model_name, history=messages, config=generateContentConfig
         )
-        chat_request: str | Content = user_input.text
+        chat_request: str | list[Part] = user_input.text
         # To prevent infinite loops, we limit the number of iterations
         for _iteration in range(MAX_TOOL_ITERATIONS):
             try:
@@ -418,6 +429,18 @@
                 error = f"Sorry, I had a problem talking to Google Generative AI: {err}"
                 raise HomeAssistantError(error) from err
 
+            if (usage_metadata := chat_response.usage_metadata) is not None:
+                chat_log.async_trace(
+                    {
+                        "stats": {
+                            "input_tokens": usage_metadata.prompt_token_count,
+                            "cached_input_tokens": usage_metadata.cached_content_token_count
+                            or 0,
+                            "output_tokens": usage_metadata.candidates_token_count,
+                        }
+                    }
+                )
+
             response_parts = chat_response.candidates[0].content.parts
             if not response_parts:
                 raise HomeAssistantError(
@@ -435,10 +458,13 @@
                 tool_name = tool_call.name
                 tool_args = _escape_decode(tool_call.args)
                 tool_calls.append(
-                    llm.ToolInput(tool_name=tool_name, tool_args=tool_args)
+                    llm.ToolInput(
+                        tool_name=self._fix_tool_name(tool_name),
+                        tool_args=tool_args,
+                    )
                 )
 
-            chat_request = _create_google_tool_response_content(
+            chat_request = _create_google_tool_response_parts(
                 [
                     tool_response
                     async for tool_response in chat_log.async_add_assistant_content(
diff --git a/homeassistant/components/google_generative_ai_conversation/manifest.json b/homeassistant/components/google_generative_ai_conversation/manifest.json
index cc381532c6f..25e44964a6d 100644
--- a/homeassistant/components/google_generative_ai_conversation/manifest.json
+++ b/homeassistant/components/google_generative_ai_conversation/manifest.json
@@ -2,11 +2,11 @@
   "domain": "google_generative_ai_conversation",
   "name": "Google Generative AI",
   "after_dependencies": ["assist_pipeline", "intent"],
-  "codeowners": ["@tronikos"],
+  "codeowners": ["@tronikos", "@ivanlh"],
   "config_flow": true,
"dependencies": ["conversation"], "documentation": "https://www.home-assistant.io/integrations/google_generative_ai_conversation", "integration_type": "service", "iot_class": "cloud_polling", - "requirements": ["google-genai==1.1.0"] + "requirements": ["google-genai==1.7.0"] } diff --git a/homeassistant/components/google_generative_ai_conversation/strings.json b/homeassistant/components/google_generative_ai_conversation/strings.json index 772fadb089c..b814f89469a 100644 --- a/homeassistant/components/google_generative_ai_conversation/strings.json +++ b/homeassistant/components/google_generative_ai_conversation/strings.json @@ -36,7 +36,8 @@ "harassment_block_threshold": "Negative or harmful comments targeting identity and/or protected attributes", "hate_block_threshold": "Content that is rude, disrespectful, or profane", "sexual_block_threshold": "Contains references to sexual acts or other lewd content", - "dangerous_block_threshold": "Promotes, facilitates, or encourages harmful acts" + "dangerous_block_threshold": "Promotes, facilitates, or encourages harmful acts", + "enable_google_search_tool": "Enable Google Search tool" }, "data_description": { "prompt": "Instruct how the LLM should respond. This can be a template." @@ -70,7 +71,7 @@ "issues": { "deprecated_image_filename_parameter": { "title": "Deprecated 'image_filename' parameter", - "description": "The 'image_filename' parameter in Google Generative AI actions is deprecated. Please edit scripts and automations to use 'filenames' intead." + "description": "The 'image_filename' parameter in Google Generative AI actions is deprecated. Please edit scripts and automations to use 'filenames' instead." } } } diff --git a/homeassistant/components/google_pubsub/manifest.json b/homeassistant/components/google_pubsub/manifest.json index d3e57c26e39..b96f4e9ebc0 100644 --- a/homeassistant/components/google_pubsub/manifest.json +++ b/homeassistant/components/google_pubsub/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/google_pubsub", "iot_class": "cloud_push", "quality_scale": "legacy", - "requirements": ["google-cloud-pubsub==2.28.0"] + "requirements": ["google-cloud-pubsub==2.29.0"] } diff --git a/homeassistant/components/growatt_server/strings.json b/homeassistant/components/growatt_server/strings.json index 9a985d98034..758428d7a55 100644 --- a/homeassistant/components/growatt_server/strings.json +++ b/homeassistant/components/growatt_server/strings.json @@ -38,28 +38,28 @@ "name": "Input 1 voltage" }, "inverter_amperage_input_1": { - "name": "Input 1 Amperage" + "name": "Input 1 amperage" }, "inverter_wattage_input_1": { - "name": "Input 1 Wattage" + "name": "Input 1 wattage" }, "inverter_voltage_input_2": { "name": "Input 2 voltage" }, "inverter_amperage_input_2": { - "name": "Input 2 Amperage" + "name": "Input 2 amperage" }, "inverter_wattage_input_2": { - "name": "Input 2 Wattage" + "name": "Input 2 wattage" }, "inverter_voltage_input_3": { "name": "Input 3 voltage" }, "inverter_amperage_input_3": { - "name": "Input 3 Amperage" + "name": "Input 3 amperage" }, "inverter_wattage_input_3": { - "name": "Input 3 Wattage" + "name": "Input 3 wattage" }, "inverter_internal_wattage": { "name": "Internal wattage" @@ -137,13 +137,13 @@ "name": "Load consumption" }, "mix_wattage_pv_1": { - "name": "PV1 Wattage" + "name": "PV1 wattage" }, "mix_wattage_pv_2": { - "name": "PV2 Wattage" + "name": "PV2 wattage" }, "mix_wattage_pv_all": { - "name": "All PV Wattage" + "name": "All PV wattage" }, 
"mix_export_to_grid": { "name": "Export to grid" @@ -182,7 +182,7 @@ "name": "Storage production today" }, "storage_storage_production_lifetime": { - "name": "Lifetime Storage production" + "name": "Lifetime storage production" }, "storage_grid_discharge_today": { "name": "Grid discharged today" @@ -224,7 +224,7 @@ "name": "Storage charging/ discharging(-ve)" }, "storage_load_consumption_solar_storage": { - "name": "Load consumption (Solar + Storage)" + "name": "Load consumption (solar + storage)" }, "storage_charge_today": { "name": "Charge today" @@ -257,7 +257,7 @@ "name": "Output voltage" }, "storage_ac_output_frequency": { - "name": "Ac output frequency" + "name": "AC output frequency" }, "storage_current_pv": { "name": "Solar charge current" @@ -290,7 +290,7 @@ "name": "Lifetime total energy input 1" }, "tlx_energy_today_input_1": { - "name": "Energy Today Input 1" + "name": "Energy today input 1" }, "tlx_voltage_input_1": { "name": "[%key:component::growatt_server::entity::sensor::inverter_voltage_input_1::name%]" @@ -305,7 +305,7 @@ "name": "Lifetime total energy input 2" }, "tlx_energy_today_input_2": { - "name": "Energy Today Input 2" + "name": "Energy today input 2" }, "tlx_voltage_input_2": { "name": "[%key:component::growatt_server::entity::sensor::inverter_voltage_input_2::name%]" @@ -320,7 +320,7 @@ "name": "Lifetime total energy input 3" }, "tlx_energy_today_input_3": { - "name": "Energy Today Input 3" + "name": "Energy today input 3" }, "tlx_voltage_input_3": { "name": "[%key:component::growatt_server::entity::sensor::inverter_voltage_input_3::name%]" @@ -335,16 +335,16 @@ "name": "Lifetime total energy input 4" }, "tlx_energy_today_input_4": { - "name": "Energy Today Input 4" + "name": "Energy today input 4" }, "tlx_voltage_input_4": { "name": "Input 4 voltage" }, "tlx_amperage_input_4": { - "name": "Input 4 Amperage" + "name": "Input 4 amperage" }, "tlx_wattage_input_4": { - "name": "Input 4 Wattage" + "name": "Input 4 wattage" }, "tlx_solar_generation_total": { "name": "Lifetime total solar energy" @@ -434,10 +434,10 @@ "name": "Money lifetime" }, "total_energy_today": { - "name": "Energy Today" + "name": "Energy today" }, "total_output_power": { - "name": "Output Power" + "name": "Output power" }, "total_energy_output": { "name": "[%key:component::growatt_server::entity::sensor::inverter_energy_total::name%]" diff --git a/homeassistant/components/habitica/const.py b/homeassistant/components/habitica/const.py index cf9d08c160c..7a5677cb687 100644 --- a/homeassistant/components/habitica/const.py +++ b/homeassistant/components/habitica/const.py @@ -52,6 +52,11 @@ ATTR_REMINDER = "reminder" ATTR_REMOVE_REMINDER = "remove_reminder" ATTR_CLEAR_REMINDER = "clear_reminder" ATTR_CLEAR_DATE = "clear_date" +ATTR_REPEAT = "repeat" +ATTR_INTERVAL = "every_x" +ATTR_START_DATE = "start_date" +ATTR_REPEAT_MONTHLY = "repeat_monthly" +ATTR_STREAK = "streak" SERVICE_CAST_SKILL = "cast_skill" SERVICE_START_QUEST = "start_quest" @@ -73,6 +78,8 @@ SERVICE_UPDATE_HABIT = "update_habit" SERVICE_CREATE_HABIT = "create_habit" SERVICE_UPDATE_TODO = "update_todo" SERVICE_CREATE_TODO = "create_todo" +SERVICE_UPDATE_DAILY = "update_daily" +SERVICE_CREATE_DAILY = "create_daily" DEVELOPER_ID = "4c4ca53f-c059-4ffa-966e-9d29dd405daf" X_CLIENT = f"{DEVELOPER_ID} - {APPLICATION_NAME} {__version__}" @@ -80,3 +87,5 @@ X_CLIENT = f"{DEVELOPER_ID} - {APPLICATION_NAME} {__version__}" SECTION_REAUTH_LOGIN = "reauth_login" SECTION_REAUTH_API_KEY = "reauth_api_key" SECTION_DANGER_ZONE = "danger_zone" + 
+WEEK_DAYS = ["m", "t", "w", "th", "f", "s", "su"] diff --git a/homeassistant/components/habitica/icons.json b/homeassistant/components/habitica/icons.json index 85adfa09304..aac90814af5 100644 --- a/homeassistant/components/habitica/icons.json +++ b/homeassistant/components/habitica/icons.json @@ -259,6 +259,25 @@ "sections": { "developer_options": "mdi:test-tube" } + }, + "update_daily": { + "service": "mdi:calendar-month", + "sections": { + "checklist_options": "mdi:format-list-checks", + "tag_options": "mdi:tag", + "developer_options": "mdi:test-tube", + "reminder_options": "mdi:reminder", + "repeat_weekly_options": "mdi:calendar-refresh", + "repeat_monthly_options": "mdi:calendar-refresh" + } + }, + "create_daily": { + "service": "mdi:calendar-month", + "sections": { + "developer_options": "mdi:test-tube", + "repeat_weekly_options": "mdi:calendar-refresh", + "repeat_monthly_options": "mdi:calendar-refresh" + } } } } diff --git a/homeassistant/components/habitica/services.py b/homeassistant/components/habitica/services.py index bb8f69a8d11..bcbd6caa7a7 100644 --- a/homeassistant/components/habitica/services.py +++ b/homeassistant/components/habitica/services.py @@ -3,7 +3,7 @@ from __future__ import annotations from dataclasses import asdict -from datetime import datetime, time +from datetime import UTC, date, datetime, time import logging from typing import TYPE_CHECKING, Any, cast from uuid import UUID, uuid4 @@ -17,6 +17,7 @@ from habiticalib import ( NotAuthorizedError, NotFoundError, Reminders, + Repeat, Skill, Task, TaskData, @@ -39,6 +40,7 @@ from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import config_validation as cv from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.selector import ConfigEntrySelector +from homeassistant.util import dt as dt_util from .const import ( ATTR_ADD_CHECKLIST_ITEM, @@ -53,6 +55,7 @@ from .const import ( ATTR_DATA, ATTR_DIRECTION, ATTR_FREQUENCY, + ATTR_INTERVAL, ATTR_ITEM, ATTR_KEYWORD, ATTR_NOTES, @@ -62,8 +65,12 @@ from .const import ( ATTR_REMOVE_CHECKLIST_ITEM, ATTR_REMOVE_REMINDER, ATTR_REMOVE_TAG, + ATTR_REPEAT, + ATTR_REPEAT_MONTHLY, ATTR_SCORE_CHECKLIST_ITEM, ATTR_SKILL, + ATTR_START_DATE, + ATTR_STREAK, ATTR_TAG, ATTR_TARGET, ATTR_TASK, @@ -77,6 +84,7 @@ from .const import ( SERVICE_API_CALL, SERVICE_CANCEL_QUEST, SERVICE_CAST_SKILL, + SERVICE_CREATE_DAILY, SERVICE_CREATE_HABIT, SERVICE_CREATE_REWARD, SERVICE_CREATE_TODO, @@ -87,9 +95,11 @@ from .const import ( SERVICE_SCORE_REWARD, SERVICE_START_QUEST, SERVICE_TRANSFORMATION, + SERVICE_UPDATE_DAILY, SERVICE_UPDATE_HABIT, SERVICE_UPDATE_REWARD, SERVICE_UPDATE_TODO, + WEEK_DAYS, ) from .coordinator import HabiticaConfigEntry @@ -152,13 +162,24 @@ BASE_TASK_SCHEMA = vol.Schema( vol.Optional(ATTR_FREQUENCY): vol.Coerce(Frequency), vol.Optional(ATTR_DATE): cv.date, vol.Optional(ATTR_CLEAR_DATE): cv.boolean, - vol.Optional(ATTR_REMINDER): vol.All(cv.ensure_list, [cv.datetime]), - vol.Optional(ATTR_REMOVE_REMINDER): vol.All(cv.ensure_list, [cv.datetime]), + vol.Optional(ATTR_REMINDER): vol.All( + cv.ensure_list, [vol.Any(cv.datetime, cv.time)] + ), + vol.Optional(ATTR_REMOVE_REMINDER): vol.All( + cv.ensure_list, [vol.Any(cv.datetime, cv.time)] + ), vol.Optional(ATTR_CLEAR_REMINDER): cv.boolean, vol.Optional(ATTR_ADD_CHECKLIST_ITEM): vol.All(cv.ensure_list, [str]), vol.Optional(ATTR_REMOVE_CHECKLIST_ITEM): vol.All(cv.ensure_list, [str]), vol.Optional(ATTR_SCORE_CHECKLIST_ITEM): 
vol.All(cv.ensure_list, [str]), vol.Optional(ATTR_UNSCORE_CHECKLIST_ITEM): vol.All(cv.ensure_list, [str]), + vol.Optional(ATTR_START_DATE): cv.date, + vol.Optional(ATTR_INTERVAL): vol.All(int, vol.Range(0)), + vol.Optional(ATTR_REPEAT): vol.All(cv.ensure_list, [vol.In(WEEK_DAYS)]), + vol.Optional(ATTR_REPEAT_MONTHLY): vol.All( + cv.string, vol.In({"day_of_month", "day_of_week"}) + ), + vol.Optional(ATTR_STREAK): vol.All(int, vol.Range(0)), } ) @@ -175,6 +196,12 @@ SERVICE_CREATE_TASK_SCHEMA = BASE_TASK_SCHEMA.extend( } ) +SERVICE_DAILY_SCHEMA = { + vol.Optional(ATTR_REMINDER): vol.All(cv.ensure_list, [cv.time]), + vol.Optional(ATTR_REMOVE_REMINDER): vol.All(cv.ensure_list, [cv.time]), +} + + SERVICE_GET_TASKS_SCHEMA = vol.Schema( { vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector({"integration": DOMAIN}), @@ -216,6 +243,8 @@ SERVICE_TASK_TYPE_MAP = { SERVICE_CREATE_HABIT: TaskType.HABIT, SERVICE_UPDATE_TODO: TaskType.TODO, SERVICE_CREATE_TODO: TaskType.TODO, + SERVICE_UPDATE_DAILY: TaskType.DAILY, + SERVICE_CREATE_DAILY: TaskType.DAILY, } @@ -605,7 +634,9 @@ def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901 SERVICE_UPDATE_HABIT, SERVICE_UPDATE_REWARD, SERVICE_UPDATE_TODO, + SERVICE_UPDATE_DAILY, ) + task_type = SERVICE_TASK_TYPE_MAP[call.service] current_task = None if is_update: @@ -614,7 +645,7 @@ def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901 task for task in coordinator.data.tasks if call.data[ATTR_TASK] in (str(task.id), task.alias, task.text) - and task.Type is SERVICE_TASK_TYPE_MAP[call.service] + and task.Type is task_type ) except StopIteration as e: raise ServiceValidationError( @@ -626,7 +657,7 @@ def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901 data = Task() if not is_update: - data["type"] = SERVICE_TASK_TYPE_MAP[call.service] + data["type"] = task_type if (text := call.data.get(ATTR_RENAME)) or (text := call.data.get(ATTR_NAME)): data["text"] = text @@ -702,6 +733,8 @@ def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901 if frequency := call.data.get(ATTR_FREQUENCY): data["frequency"] = frequency + else: + frequency = current_task.frequency if current_task else Frequency.WEEKLY if up_down := call.data.get(ATTR_UP_DOWN): data["up"] = "up" in up_down @@ -752,23 +785,46 @@ def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901 reminders = current_task.reminders if current_task else [] if add_reminders := call.data.get(ATTR_REMINDER): - existing_reminder_datetimes = { - r.time.replace(tzinfo=None) for r in reminders - } + if task_type is TaskType.TODO: + existing_reminder_datetimes = { + r.time.replace(tzinfo=None) for r in reminders + } - reminders.extend( - Reminders(id=uuid4(), time=r) - for r in add_reminders - if r not in existing_reminder_datetimes - ) + reminders.extend( + Reminders(id=uuid4(), time=r) + for r in add_reminders + if r not in existing_reminder_datetimes + ) + if task_type is TaskType.DAILY: + existing_reminder_times = { + r.time.time().replace(microsecond=0, second=0) for r in reminders + } + + reminders.extend( + Reminders( + id=uuid4(), + time=datetime.combine(date.today(), r, tzinfo=UTC), + ) + for r in add_reminders + if r not in existing_reminder_times + ) if remove_reminder := call.data.get(ATTR_REMOVE_REMINDER): - reminders = list( - filter( - lambda r: r.time.replace(tzinfo=None) not in remove_reminder, - reminders, + if task_type is TaskType.TODO: + reminders = list( + filter( + lambda r: r.time.replace(tzinfo=None) not in remove_reminder, + reminders, + ) + ) 
+ if task_type is TaskType.DAILY: + reminders = list( + filter( + lambda r: r.time.time().replace(second=0, microsecond=0) + not in remove_reminder, + reminders, + ) ) - ) if clear_reminders := call.data.get(ATTR_CLEAR_REMINDER): reminders = [] @@ -776,6 +832,47 @@ def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901 if add_reminders or remove_reminder or clear_reminders: data["reminders"] = reminders + if start_date := call.data.get(ATTR_START_DATE): + data["startDate"] = datetime.combine(start_date, time()) + else: + start_date = ( + current_task.startDate + if current_task and current_task.startDate + else dt_util.start_of_local_day() + ) + if repeat := call.data.get(ATTR_REPEAT): + if frequency is Frequency.WEEKLY: + data["repeat"] = Repeat(**{d: d in repeat for d in WEEK_DAYS}) + else: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="frequency_not_weekly", + ) + if repeat_monthly := call.data.get(ATTR_REPEAT_MONTHLY): + if frequency is not Frequency.MONTHLY: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="frequency_not_monthly", + ) + + if repeat_monthly == "day_of_week": + weekday = start_date.weekday() + data["weeksOfMonth"] = [(start_date.day - 1) // 7] + data["repeat"] = Repeat( + **{day: i == weekday for i, day in enumerate(WEEK_DAYS)} + ) + data["daysOfMonth"] = [] + + else: + data["daysOfMonth"] = [start_date.day] + data["weeksOfMonth"] = [] + + if interval := call.data.get(ATTR_INTERVAL): + data["everyX"] = interval + + if streak := call.data.get(ATTR_STREAK): + data["streak"] = streak + try: if is_update: if TYPE_CHECKING: @@ -805,7 +902,12 @@ def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901 else: return response.data.to_dict(omit_none=True) - for service in (SERVICE_UPDATE_TODO, SERVICE_UPDATE_REWARD, SERVICE_UPDATE_HABIT): + for service in ( + SERVICE_UPDATE_DAILY, + SERVICE_UPDATE_HABIT, + SERVICE_UPDATE_REWARD, + SERVICE_UPDATE_TODO, + ): hass.services.async_register( DOMAIN, service, @@ -813,7 +915,12 @@ def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901 schema=SERVICE_UPDATE_TASK_SCHEMA, supports_response=SupportsResponse.ONLY, ) - for service in (SERVICE_CREATE_HABIT, SERVICE_CREATE_REWARD, SERVICE_CREATE_TODO): + for service in ( + SERVICE_CREATE_DAILY, + SERVICE_CREATE_HABIT, + SERVICE_CREATE_REWARD, + SERVICE_CREATE_TODO, + ): hass.services.async_register( DOMAIN, service, diff --git a/homeassistant/components/habitica/services.yaml b/homeassistant/components/habitica/services.yaml index acbe4e62824..3fb25e2b4b7 100644 --- a/homeassistant/components/habitica/services.yaml +++ b/homeassistant/components/habitica/services.yaml @@ -268,7 +268,7 @@ update_todo: task: *task rename: *rename notes: *notes - checklist_options: + checklist_options: &checklist_options collapsed: true fields: add_checklist_item: &add_checklist_item @@ -320,7 +320,7 @@ update_todo: text: type: datetime-local multiple: true - clear_reminder: + clear_reminder: &clear_reminder required: false selector: constant: @@ -339,3 +339,108 @@ create_todo: reminder: *reminder tag: *tag developer_options: *developer_options +update_daily: + fields: + config_entry: *config_entry + task: *task + rename: *rename + notes: *notes + checklist_options: *checklist_options + priority: *priority + start_date: &start_date + required: false + selector: + date: + frequency: &frequency_daily + required: false + selector: + select: + options: + - "daily" + - "weekly" + - "monthly" + - "yearly" + 
translation_key: "frequency" + mode: dropdown + every_x: &every_x + required: false + selector: + number: + min: 0 + step: 1 + unit_of_measurement: "🔃" + mode: box + repeat_weekly_options: &repeat_weekly_options + collapsed: true + fields: + repeat: + required: false + selector: + select: + options: + - "m" + - "t" + - "w" + - "th" + - "f" + - "s" + - "su" + mode: list + translation_key: repeat + multiple: true + repeat_monthly_options: &repeat_monthly_options + collapsed: true + fields: + repeat_monthly: + required: false + selector: + select: + options: + - "day_of_month" + - "day_of_week" + translation_key: repeat_monthly + mode: list + reminder_options: + collapsed: true + fields: + reminder: &reminder_daily + required: false + selector: + text: + type: time + multiple: true + remove_reminder: + required: false + selector: + text: + type: time + multiple: true + clear_reminder: *clear_reminder + tag_options: *tag_options + developer_options: + collapsed: true + fields: + streak: &streak + required: false + selector: + number: + min: 0 + step: 1 + unit_of_measurement: "▶▶" + mode: box + alias: *alias +create_daily: + fields: + config_entry: *config_entry + name: *name + notes: *notes + add_checklist_item: *add_checklist_item + priority: *priority + start_date: *start_date + frequency: *frequency_daily + every_x: *every_x + repeat_weekly_options: *repeat_weekly_options + repeat_monthly_options: *repeat_monthly_options + reminder: *reminder_daily + tag: *tag + developer_options: *developer_options diff --git a/homeassistant/components/habitica/strings.json b/homeassistant/components/habitica/strings.json index 513c0b36b27..695eb1576fe 100644 --- a/homeassistant/components/habitica/strings.json +++ b/homeassistant/components/habitica/strings.json @@ -51,7 +51,20 @@ "reminder_options_name": "Reminders", "reminder_options_description": "Add, remove or clear reminders of a Habitica task.", "date_name": "Due date", - "date_description": "The to-do's due date." + "date_description": "The to-do's due date.", + "repeat_name": "Repeat on", + "start_date_name": "Start date", + "start_date_description": "Defines when the daily task becomes active and specifies the exact weekday or day of the month it repeats on.", + "frequency_daily_name": "Repeat interval", + "frequency_daily_description": "The repetition interval of a daily.", + "every_x_name": "Repeat every X", + "every_x_description": "The number of intervals (days, weeks, months, or years) after which the daily repeats, based on the chosen repetition interval. A value of 0 makes the daily inactive ('Grey Daily').", + "repeat_weekly_description": "The days of the week the daily repeats.", + "repeat_monthly_description": "Whether a monthly recurring task repeats on the same calendar day each month or on the same weekday and week of the month, based on the start date.", + "repeat_weekly_options_name": "Weekly repeat days", + "repeat_weekly_options_description": "Options related to weekly repetition, applicable when the repetition interval is set to weekly.", + "repeat_monthly_options_name": "Monthly repeat day", + "repeat_monthly_options_description": "Options related to monthly repetition, applicable when the repetition interval is set to monthly." 
}, "config": { "abort": { @@ -767,7 +780,7 @@ "description": "[%key:component::habitica::common::notes_description%]" }, "tag": { - "name": "[%key:component::habitica::common::tag_name%]", + "name": "[%key:component::habitica::common::tag_options_name%]", "description": "[%key:component::habitica::common::tag_description%]" }, "alias": { @@ -867,7 +880,7 @@ "description": "[%key:component::habitica::common::notes_description%]" }, "tag": { - "name": "[%key:component::habitica::common::tag_name%]", + "name": "[%key:component::habitica::common::tag_options_name%]", "description": "[%key:component::habitica::common::tag_description%]" }, "alias": { @@ -1007,7 +1020,7 @@ "description": "[%key:component::habitica::common::notes_description%]" }, "tag": { - "name": "[%key:component::habitica::common::tag_name%]", + "name": "[%key:component::habitica::common::tag_options_name%]", "description": "[%key:component::habitica::common::tag_description%]" }, "alias": { @@ -1023,11 +1036,11 @@ "description": "[%key:component::habitica::common::date_description%]" }, "reminder": { - "name": "[%key:component::habitica::common::reminder_name%]", + "name": "[%key:component::habitica::common::reminder_options_name%]", "description": "[%key:component::habitica::common::reminder_description%]" }, "add_checklist_item": { - "name": "[%key:component::habitica::common::add_checklist_item_name%]", + "name": "[%key:component::habitica::common::checklist_options_name%]", "description": "[%key:component::habitica::common::add_checklist_item_description%]" } }, @@ -1037,6 +1050,194 @@ "description": "[%key:component::habitica::common::developer_options_description%]" } } + }, + "update_daily": { + "name": "Update a daily", + "description": "Updates a specific daily for a selected Habitica character", + "fields": { + "config_entry": { + "name": "[%key:component::habitica::common::config_entry_name%]", + "description": "[%key:component::habitica::common::config_entry_description%]" + }, + "task": { + "name": "[%key:component::habitica::common::task_name%]", + "description": "The name (or task ID) of the daily you want to update." 
+ }, + "rename": { + "name": "[%key:component::habitica::common::rename_name%]", + "description": "[%key:component::habitica::common::rename_description%]" + }, + "notes": { + "name": "[%key:component::habitica::common::notes_name%]", + "description": "[%key:component::habitica::common::notes_description%]" + }, + "tag": { + "name": "[%key:component::habitica::common::tag_name%]", + "description": "[%key:component::habitica::common::tag_description%]" + }, + "remove_tag": { + "name": "[%key:component::habitica::common::remove_tag_name%]", + "description": "[%key:component::habitica::common::remove_tag_description%]" + }, + "alias": { + "name": "[%key:component::habitica::common::alias_name%]", + "description": "[%key:component::habitica::common::alias_description%]" + }, + "priority": { + "name": "[%key:component::habitica::common::priority_name%]", + "description": "[%key:component::habitica::common::priority_description%]" + }, + "start_date": { + "name": "[%key:component::habitica::common::start_date_name%]", + "description": "[%key:component::habitica::common::start_date_description%]" + }, + "frequency": { + "name": "[%key:component::habitica::common::frequency_daily_name%]", + "description": "[%key:component::habitica::common::frequency_daily_description%]" + }, + "every_x": { + "name": "[%key:component::habitica::common::every_x_name%]", + "description": "[%key:component::habitica::common::every_x_description%]" + }, + "repeat": { + "name": "[%key:component::habitica::common::repeat_name%]", + "description": "[%key:component::habitica::common::repeat_weekly_description%]" + }, + "repeat_monthly": { + "name": "[%key:component::habitica::common::repeat_name%]", + "description": "[%key:component::habitica::common::repeat_monthly_description%]" + }, + "add_checklist_item": { + "name": "[%key:component::habitica::common::add_checklist_item_name%]", + "description": "[%key:component::habitica::common::add_checklist_item_description%]" + }, + "remove_checklist_item": { + "name": "[%key:component::habitica::common::remove_checklist_item_name%]", + "description": "[%key:component::habitica::common::remove_checklist_item_description%]" + }, + "score_checklist_item": { + "name": "[%key:component::habitica::common::score_checklist_item_name%]", + "description": "[%key:component::habitica::common::score_checklist_item_description%]" + }, + "unscore_checklist_item": { + "name": "[%key:component::habitica::common::unscore_checklist_item_name%]", + "description": "[%key:component::habitica::common::unscore_checklist_item_description%]" + }, + "streak": { + "name": "Adjust streak", + "description": "Adjust or reset the streak counter of the daily." 
+ }, + "reminder": { + "name": "[%key:component::habitica::common::reminder_name%]", + "description": "[%key:component::habitica::common::reminder_description%]" + }, + "remove_reminder": { + "name": "[%key:component::habitica::common::remove_reminder_name%]", + "description": "[%key:component::habitica::common::remove_reminder_description%]" + }, + "clear_reminder": { + "name": "[%key:component::habitica::common::clear_reminder_name%]", + "description": "[%key:component::habitica::common::clear_reminder_description%]" + } + }, + "sections": { + "checklist_options": { + "name": "[%key:component::habitica::common::checklist_options_name%]", + "description": "[%key:component::habitica::common::checklist_options_description%]" + }, + "repeat_weekly_options": { + "name": "[%key:component::habitica::common::repeat_weekly_options_name%]", + "description": "[%key:component::habitica::common::repeat_weekly_options_description%]" + }, + "repeat_monthly_options": { + "name": "[%key:component::habitica::common::repeat_monthly_options_name%]", + "description": "[%key:component::habitica::common::repeat_monthly_options_description%]" + }, + "tag_options": { + "name": "[%key:component::habitica::common::tag_options_name%]", + "description": "[%key:component::habitica::common::tag_options_description%]" + }, + "developer_options": { + "name": "[%key:component::habitica::common::developer_options_name%]", + "description": "[%key:component::habitica::common::developer_options_description%]" + }, + "reminder_options": { + "name": "[%key:component::habitica::common::reminder_options_name%]", + "description": "[%key:component::habitica::common::reminder_options_description%]" + } + } + }, + "create_daily": { + "name": "Create a daily", + "description": "Adds a new daily.", + "fields": { + "config_entry": { + "name": "[%key:component::habitica::common::config_entry_name%]", + "description": "[%key:component::habitica::common::config_entry_description%]" + }, + "name": { + "name": "[%key:component::habitica::common::task_name%]", + "description": "[%key:component::habitica::common::name_description%]" + }, + "notes": { + "name": "[%key:component::habitica::common::notes_name%]", + "description": "[%key:component::habitica::common::notes_description%]" + }, + "tag": { + "name": "[%key:component::habitica::common::tag_options_name%]", + "description": "[%key:component::habitica::common::tag_description%]" + }, + "alias": { + "name": "[%key:component::habitica::common::alias_name%]", + "description": "[%key:component::habitica::common::alias_description%]" + }, + "priority": { + "name": "[%key:component::habitica::common::priority_name%]", + "description": "[%key:component::habitica::common::priority_description%]" + }, + "start_date": { + "name": "[%key:component::habitica::common::start_date_name%]", + "description": "[%key:component::habitica::common::start_date_description%]" + }, + "frequency": { + "name": "[%key:component::habitica::common::frequency_daily_name%]", + "description": "[%key:component::habitica::common::frequency_daily_description%]" + }, + "every_x": { + "name": "[%key:component::habitica::common::every_x_name%]", + "description": "[%key:component::habitica::common::every_x_description%]" + }, + "repeat": { + "name": "[%key:component::habitica::common::repeat_name%]", + "description": "[%key:component::habitica::common::repeat_weekly_description%]" + }, + "repeat_monthly": { + "name": "[%key:component::habitica::common::repeat_name%]", + "description": 
"[%key:component::habitica::common::repeat_monthly_description%]" + }, + "add_checklist_item": { + "name": "[%key:component::habitica::common::checklist_options_name%]", + "description": "[%key:component::habitica::common::add_checklist_item_description%]" + }, + "reminder": { + "name": "[%key:component::habitica::common::reminder_options_name%]", + "description": "[%key:component::habitica::common::reminder_description%]" + } + }, + "sections": { + "repeat_weekly_options": { + "name": "[%key:component::habitica::common::repeat_weekly_options_name%]", + "description": "[%key:component::habitica::common::repeat_weekly_options_description%]" + }, + "repeat_monthly_options": { + "name": "[%key:component::habitica::common::repeat_monthly_options_name%]", + "description": "[%key:component::habitica::common::repeat_monthly_options_description%]" + }, + "developer_options": { + "name": "[%key:component::habitica::common::developer_options_name%]", + "description": "[%key:component::habitica::common::developer_options_description%]" + } + } } }, "selector": { @@ -1079,6 +1280,23 @@ "monthly": "Monthly", "yearly": "Yearly" } + }, + "repeat": { + "options": { + "m": "[%key:common::time::monday%]", + "t": "[%key:common::time::tuesday%]", + "w": "[%key:common::time::wednesday%]", + "th": "[%key:common::time::thursday%]", + "f": "[%key:common::time::friday%]", + "s": "[%key:common::time::saturday%]", + "su": "[%key:common::time::sunday%]" + } + }, + "repeat_monthly": { + "options": { + "day_of_month": "Day of the month", + "day_of_week": "Day of the week" + } } } } diff --git a/homeassistant/components/harmony/manifest.json b/homeassistant/components/harmony/manifest.json index aab4f51b09a..f67eb4db5aa 100644 --- a/homeassistant/components/harmony/manifest.json +++ b/homeassistant/components/harmony/manifest.json @@ -7,7 +7,7 @@ "documentation": "https://www.home-assistant.io/integrations/harmony", "iot_class": "local_push", "loggers": ["aioharmony", "slixmpp"], - "requirements": ["aioharmony==0.4.1"], + "requirements": ["aioharmony==0.5.2"], "ssdp": [ { "manufacturer": "Logitech", diff --git a/homeassistant/components/hassio/strings.json b/homeassistant/components/hassio/strings.json index 799067b8215..a543dbc7f89 100644 --- a/homeassistant/components/hassio/strings.json +++ b/homeassistant/components/hassio/strings.json @@ -152,7 +152,7 @@ }, "unsupported_connectivity_check": { "title": "Unsupported system - Connectivity check disabled", - "description": "System is unsupported because Home Assistant cannot determine when an internet connection is available. Use the link to learn more and how to fix this." + "description": "System is unsupported because Home Assistant cannot determine when an Internet connection is available. Use the link to learn more and how to fix this." }, "unsupported_content_trust": { "title": "Unsupported system - Content-trust check disabled", @@ -216,7 +216,7 @@ }, "unsupported_systemd_journal": { "title": "Unsupported system - Systemd Journal issues", - "description": "System is unsupported because Systemd Journal and/or the gateway service is missing, inactive or misconfigured . Use the link to learn more and how to fix this." + "description": "System is unsupported because Systemd Journal and/or the gateway service is missing, inactive or misconfigured. Use the link to learn more and how to fix this." 
}, "unsupported_systemd_resolved": { "title": "Unsupported system - Systemd-Resolved issues", @@ -348,7 +348,7 @@ }, "homeassistant_exclude_database": { "name": "Home Assistant exclude database", - "description": "Exclude the Home Assistant database file from backup" + "description": "Exclude the Home Assistant database file from the backup." } } }, @@ -385,8 +385,8 @@ "description": "[%key:component::hassio::services::backup_full::fields::location::description%]" }, "homeassistant_exclude_database": { - "name": "Home Assistant exclude database", - "description": "Exclude the Home Assistant database file from backup" + "name": "[%key:component::hassio::services::backup_full::fields::homeassistant_exclude_database::name%]", + "description": "[%key:component::hassio::services::backup_full::fields::homeassistant_exclude_database::description%]" } } }, diff --git a/homeassistant/components/heos/const.py b/homeassistant/components/heos/const.py index 6d603f7ad30..789fbc12b8e 100644 --- a/homeassistant/components/heos/const.py +++ b/homeassistant/components/heos/const.py @@ -4,6 +4,7 @@ ATTR_PASSWORD = "password" ATTR_USERNAME = "username" DOMAIN = "heos" ENTRY_TITLE = "HEOS System" +SERVICE_GET_QUEUE = "get_queue" SERVICE_GROUP_VOLUME_SET = "group_volume_set" SERVICE_GROUP_VOLUME_DOWN = "group_volume_down" SERVICE_GROUP_VOLUME_UP = "group_volume_up" diff --git a/homeassistant/components/heos/coordinator.py b/homeassistant/components/heos/coordinator.py index 0303d150794..5e72eb1427e 100644 --- a/homeassistant/components/heos/coordinator.py +++ b/homeassistant/components/heos/coordinator.py @@ -6,7 +6,6 @@ entities to update. Entities subscribe to entity-specific updates within the ent """ from collections.abc import Callable, Sequence -from datetime import datetime, timedelta import logging from typing import Any @@ -25,10 +24,10 @@ from pyheos import ( from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform -from homeassistant.core import CALLBACK_TYPE, HassJob, HomeAssistant, callback +from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import device_registry as dr, entity_registry as er -from homeassistant.helpers.event import async_call_later +from homeassistant.helpers.debounce import Debouncer from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from .const import DOMAIN @@ -43,7 +42,6 @@ class HeosCoordinator(DataUpdateCoordinator[None]): def __init__(self, hass: HomeAssistant, config_entry: HeosConfigEntry) -> None: """Set up the coordinator and set in config_entry.""" - self.host: str = config_entry.data[CONF_HOST] credentials: Credentials | None = None if config_entry.options: credentials = Credentials( @@ -53,19 +51,31 @@ class HeosCoordinator(DataUpdateCoordinator[None]): # media position update upon start of playback or when media changes self.heos = Heos( HeosOptions( - self.host, + config_entry.data[CONF_HOST], all_progress_events=False, auto_reconnect=True, + auto_failover=True, credentials=credentials, ) ) self._platform_callbacks: list[Callable[[Sequence[HeosPlayer]], None]] = [] - self._update_sources_pending: bool = False + self._update_sources_debouncer = Debouncer( + hass, + _LOGGER, + immediate=True, + cooldown=2.0, + function=self._async_update_sources, + ) self._source_list: list[str] = [] self._favorites: dict[int, MediaItem] = {} self._inputs: Sequence[MediaItem] = [] 
super().__init__(hass, _LOGGER, config_entry=config_entry, name=DOMAIN) + @property + def host(self) -> str: + """Get the host address of the device.""" + return self.heos.current_host + @property def inputs(self) -> Sequence[MediaItem]: """Get input sources across all devices.""" @@ -159,43 +169,27 @@ class HeosCoordinator(DataUpdateCoordinator[None]): async def _async_on_reconnected(self) -> None: """Handle when reconnected so resources are updated and entities marked available.""" - await self._async_update_players() + assert self.config_entry is not None + if self.host != self.config_entry.data[CONF_HOST]: + self.hass.config_entries.async_update_entry( + self.config_entry, data={CONF_HOST: self.host} + ) + _LOGGER.warning("Successfully failed over to HEOS host %s", self.host) + else: + _LOGGER.warning("Successfully reconnected to HEOS host %s", self.host) await self._async_update_sources() - _LOGGER.warning("Successfully reconnected to HEOS host %s", self.host) self.async_update_listeners() async def _async_on_controller_event( - self, event: str, data: PlayerUpdateResult | None + self, event: str, data: PlayerUpdateResult | None = None ) -> None: """Handle a controller event, such as players or groups changed.""" if event == const.EVENT_PLAYERS_CHANGED: assert data is not None self._async_handle_player_update_result(data) - elif ( - event in (const.EVENT_SOURCES_CHANGED, const.EVENT_USER_CHANGED) - and not self._update_sources_pending - ): - # Update the sources after a brief delay as we may have received multiple qualifying - # events at once and devices cannot handle immediately attempting to refresh sources. - self._update_sources_pending = True - - async def update_sources_job(_: datetime | None = None) -> None: - await self._async_update_sources() - self._update_sources_pending = False - self.async_update_listeners() - - assert self.config_entry is not None - self.config_entry.async_on_unload( - async_call_later( - self.hass, - timedelta(seconds=1), - HassJob( - update_sources_job, - "heos_update_sources", - cancel_on_shutdown=True, - ), - ) - ) + elif event in (const.EVENT_SOURCES_CHANGED, const.EVENT_USER_CHANGED): + # Debounce because we may have received multiple qualifying events in rapid succession. 
+ await self._update_sources_debouncer.async_call() self.async_update_listeners() def _async_update_player_ids(self, updated_player_ids: dict[int, int]) -> None: @@ -257,15 +251,6 @@ class HeosCoordinator(DataUpdateCoordinator[None]): else: self._source_list.extend([source.name for source in self._inputs]) - async def _async_update_players(self) -> None: - """Update players after reconnection.""" - try: - player_updates = await self.heos.load_players() - except HeosError as error: - _LOGGER.error("Unable to refresh players: %s", error) - return - self._async_handle_player_update_result(player_updates) - @callback def async_get_source_list(self) -> list[str]: """Return the list of sources for players.""" diff --git a/homeassistant/components/heos/icons.json b/homeassistant/components/heos/icons.json index d7a998b6aec..c957ac1939c 100644 --- a/homeassistant/components/heos/icons.json +++ b/homeassistant/components/heos/icons.json @@ -1,5 +1,8 @@ { "services": { + "get_queue": { + "service": "mdi:playlist-music" + }, "group_volume_set": { "service": "mdi:volume-medium" }, diff --git a/homeassistant/components/heos/manifest.json b/homeassistant/components/heos/manifest.json index 573deda2132..cbac9f20574 100644 --- a/homeassistant/components/heos/manifest.json +++ b/homeassistant/components/heos/manifest.json @@ -8,7 +8,7 @@ "iot_class": "local_push", "loggers": ["pyheos"], "quality_scale": "platinum", - "requirements": ["pyheos==1.0.2"], + "requirements": ["pyheos==1.0.4"], "ssdp": [ { "st": "urn:schemas-denon-com:device:ACT-Denon:1" diff --git a/homeassistant/components/heos/media_player.py b/homeassistant/components/heos/media_player.py index 9edc674d1cf..81d997ba44f 100644 --- a/homeassistant/components/heos/media_player.py +++ b/homeassistant/components/heos/media_player.py @@ -3,27 +3,36 @@ from __future__ import annotations from collections.abc import Awaitable, Callable, Coroutine, Sequence +from contextlib import suppress +import dataclasses from datetime import datetime from functools import reduce, wraps +import logging from operator import ior -from typing import Any +from typing import Any, Final from pyheos import ( AddCriteriaType, ControlType, HeosError, HeosPlayer, + MediaItem, + MediaMusicSource, + MediaType as HeosMediaType, PlayState, RepeatType, const as heos_const, ) +from pyheos.util import mediauri as heos_source import voluptuous as vol from homeassistant.components import media_source from homeassistant.components.media_player import ( ATTR_MEDIA_ENQUEUE, ATTR_MEDIA_VOLUME_LEVEL, + BrowseError, BrowseMedia, + MediaClass, MediaPlayerEnqueue, MediaPlayerEntity, MediaPlayerEntityFeature, @@ -32,8 +41,14 @@ from homeassistant.components.media_player import ( RepeatMode, async_process_play_media_url, ) +from homeassistant.components.media_source import BrowseMediaSource from homeassistant.const import Platform -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import ( + HomeAssistant, + ServiceResponse, + SupportsResponse, + callback, +) from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import ( config_validation as cv, @@ -47,6 +62,7 @@ from homeassistant.util.dt import utcnow from .const import ( DOMAIN as HEOS_DOMAIN, + SERVICE_GET_QUEUE, SERVICE_GROUP_VOLUME_DOWN, SERVICE_GROUP_VOLUME_SET, SERVICE_GROUP_VOLUME_UP, @@ -55,6 +71,8 @@ from .coordinator import HeosConfigEntry, HeosCoordinator PARALLEL_UPDATES = 0 +BROWSE_ROOT: Final = "heos://media" + BASE_SUPPORTED_FEATURES = ( 
MediaPlayerEntityFeature.VOLUME_MUTE | MediaPlayerEntityFeature.VOLUME_SET @@ -97,6 +115,21 @@ HEOS_HA_REPEAT_TYPE_MAP = { } HA_HEOS_REPEAT_TYPE_MAP = {v: k for k, v in HEOS_HA_REPEAT_TYPE_MAP.items()} +HEOS_MEDIA_TYPE_TO_MEDIA_CLASS = { + HeosMediaType.ALBUM: MediaClass.ALBUM, + HeosMediaType.ARTIST: MediaClass.ARTIST, + HeosMediaType.CONTAINER: MediaClass.DIRECTORY, + HeosMediaType.GENRE: MediaClass.GENRE, + HeosMediaType.HEOS_SERVER: MediaClass.DIRECTORY, + HeosMediaType.HEOS_SERVICE: MediaClass.DIRECTORY, + HeosMediaType.MUSIC_SERVICE: MediaClass.DIRECTORY, + HeosMediaType.PLAYLIST: MediaClass.PLAYLIST, + HeosMediaType.SONG: MediaClass.TRACK, + HeosMediaType.STATION: MediaClass.TRACK, +} + +_LOGGER = logging.getLogger(__name__) + async def async_setup_entry( hass: HomeAssistant, @@ -106,6 +139,12 @@ async def async_setup_entry( """Add media players for a config entry.""" # Register custom entity services platform = entity_platform.async_get_current_platform() + platform.async_register_entity_service( + SERVICE_GET_QUEUE, + None, + "async_get_queue", + supports_response=SupportsResponse.ONLY, + ) platform.async_register_entity_service( SERVICE_GROUP_VOLUME_SET, {vol.Required(ATTR_MEDIA_VOLUME_LEVEL): cv.small_float}, @@ -129,20 +168,20 @@ async def async_setup_entry( add_entities_callback(list(coordinator.heos.players.values())) -type _FuncType[**_P] = Callable[_P, Awaitable[Any]] -type _ReturnFuncType[**_P] = Callable[_P, Coroutine[Any, Any, None]] +type _FuncType[**_P, _R] = Callable[_P, Awaitable[_R]] +type _ReturnFuncType[**_P, _R] = Callable[_P, Coroutine[Any, Any, _R]] -def catch_action_error[**_P]( +def catch_action_error[**_P, _R]( action: str, -) -> Callable[[_FuncType[_P]], _ReturnFuncType[_P]]: +) -> Callable[[_FuncType[_P, _R]], _ReturnFuncType[_P, _R]]: """Return decorator that catches errors and raises HomeAssistantError.""" - def decorator(func: _FuncType[_P]) -> _ReturnFuncType[_P]: + def decorator(func: _FuncType[_P, _R]) -> _ReturnFuncType[_P, _R]: @wraps(func) - async def wrapper(*args: _P.args, **kwargs: _P.kwargs) -> None: + async def wrapper(*args: _P.args, **kwargs: _P.kwargs) -> _R: try: - await func(*args, **kwargs) + return await func(*args, **kwargs) except (HeosError, ValueError) as ex: raise HomeAssistantError( translation_domain=HEOS_DOMAIN, @@ -242,6 +281,12 @@ class HeosMediaPlayer(CoordinatorEntity[HeosCoordinator], MediaPlayerEntity): self.async_on_remove(self._player.add_on_player_event(self._player_update)) await super().async_added_to_hass() + @catch_action_error("get queue") + async def async_get_queue(self) -> ServiceResponse: + """Get the queue for the current player.""" + queue = await self._player.get_queue() + return {"queue": [dataclasses.asdict(item) for item in queue]} + @catch_action_error("clear playlist") async def async_clear_playlist(self) -> None: """Clear players playlist.""" @@ -282,6 +327,16 @@ class HeosMediaPlayer(CoordinatorEntity[HeosCoordinator], MediaPlayerEntity): self, media_type: MediaType | str, media_id: str, **kwargs: Any ) -> None: """Play a piece of media.""" + if heos_source.is_media_uri(media_id): + media, data = heos_source.from_media_uri(media_id) + if not isinstance(media, MediaItem): + raise ValueError(f"Invalid media id '{media_id}'") + await self._player.play_media( + media, + HA_HEOS_ENQUEUE_MAP[kwargs.get(ATTR_MEDIA_ENQUEUE)], + ) + return + if media_source.is_media_source_id(media_id): media_type = MediaType.URL play_item = await media_source.async_resolve_media( @@ -332,6 +387,15 @@ class 
HeosMediaPlayer(CoordinatorEntity[HeosCoordinator], MediaPlayerEntity): await self._player.play_preset_station(index) return + if media_type == "queue": + # media_id must be an int + try: + queue_id = int(media_id) + except ValueError: + raise ValueError(f"Invalid queue id '{media_id}'") from None + await self._player.play_queue(queue_id) + return + raise ValueError(f"Unsupported media type '{media_type}'") @catch_action_error("select source") @@ -534,14 +598,103 @@ class HeosMediaPlayer(CoordinatorEntity[HeosCoordinator], MediaPlayerEntity): """Volume level of the media player (0..1).""" return self._player.volume / 100 + async def _async_browse_media_root(self) -> BrowseMedia: + """Return media browsing root.""" + if not self.coordinator.heos.music_sources: + try: + await self.coordinator.heos.get_music_sources() + except HeosError as error: + _LOGGER.debug("Unable to load music sources: %s", error) + children: list[BrowseMedia] = [ + _media_to_browse_media(source) + for source in self.coordinator.heos.music_sources.values() + if source.available or source.source_id == heos_const.MUSIC_SOURCE_TUNEIN + ] + root = BrowseMedia( + title="Music Sources", + media_class=MediaClass.DIRECTORY, + children_media_class=MediaClass.DIRECTORY, + media_content_type="", + media_content_id=BROWSE_ROOT, + can_expand=True, + can_play=False, + children=children, + ) + # Append media source items + with suppress(BrowseError): + browse = await self._async_browse_media_source() + # If domain is None, it's an overview of available sources + if browse.domain is None and browse.children: + children.extend(browse.children) + else: + children.append(browse) + return root + + async def _async_browse_heos_media(self, media_content_id: str) -> BrowseMedia: + """Browse a HEOS media item.""" + media, data = heos_source.from_media_uri(media_content_id) + browse_media = _media_to_browse_media(media) + try: + browse_result = await self.coordinator.heos.browse_media(media) + except HeosError as error: + _LOGGER.debug("Unable to browse media %s: %s", media, error) + else: + browse_media.children = [ + _media_to_browse_media(item) + for item in browse_result.items + if item.browsable or item.playable + ] + return browse_media + + async def _async_browse_media_source( + self, media_content_id: str | None = None + ) -> BrowseMediaSource: + """Browse a media source item.""" + return await media_source.async_browse_media( + self.hass, + media_content_id, + content_filter=lambda item: item.media_content_type.startswith("audio/"), + ) + async def async_browse_media( self, media_content_type: MediaType | str | None = None, media_content_id: str | None = None, ) -> BrowseMedia: """Implement the websocket media browsing helper.""" - return await media_source.async_browse_media( - self.hass, - media_content_id, - content_filter=lambda item: item.media_content_type.startswith("audio/"), + if media_content_id in (None, BROWSE_ROOT): + return await self._async_browse_media_root() + assert media_content_id is not None + if heos_source.is_media_uri(media_content_id): + return await self._async_browse_heos_media(media_content_id) + if media_source.is_media_source_id(media_content_id): + return await self._async_browse_media_source(media_content_id) + raise ServiceValidationError( + translation_domain=HEOS_DOMAIN, + translation_key="unsupported_media_content_id", + translation_placeholders={"media_content_id": media_content_id}, ) + + +def _media_to_browse_media(media: MediaItem | MediaMusicSource) -> BrowseMedia: + """Convert a HEOS media 
item to a browse media item.""" + can_expand = False + can_play = False + + if isinstance(media, MediaMusicSource): + can_expand = ( + media.source_id == heos_const.MUSIC_SOURCE_TUNEIN or media.available + ) + else: + can_expand = media.browsable + can_play = media.playable + + return BrowseMedia( + can_expand=can_expand, + can_play=can_play, + media_content_id=heos_source.to_media_uri(media), + media_content_type="", + media_class=HEOS_MEDIA_TYPE_TO_MEDIA_CLASS[media.type], + title=media.name, + thumbnail=media.image_url, + ) diff --git a/homeassistant/components/heos/services.yaml b/homeassistant/components/heos/services.yaml index 8f3a43421f6..fa79bd03096 100644 --- a/homeassistant/components/heos/services.yaml +++ b/homeassistant/components/heos/services.yaml @@ -1,3 +1,9 @@ +get_queue: + target: + entity: + integration: heos + domain: media_player + group_volume_set: target: entity: diff --git a/homeassistant/components/heos/strings.json b/homeassistant/components/heos/strings.json index 340eecb9f8b..38e3349b7c0 100644 --- a/homeassistant/components/heos/strings.json +++ b/homeassistant/components/heos/strings.json @@ -86,6 +86,10 @@ } } }, + "get_queue": { + "name": "Get queue", + "description": "Retrieves the queue of the media player." + }, "group_volume_down": { "name": "Turn down group volume", "description": "Turns down the group volume." @@ -146,6 +150,9 @@ }, "unknown_source": { "message": "Unknown source: {source}" + }, + "unsupported_media_content_id": { + "message": "Unsupported media_content_id: {media_content_id}" } }, "issues": { diff --git a/homeassistant/components/hive/strings.json b/homeassistant/components/hive/strings.json index 219776ad7e6..6323a2eecbf 100644 --- a/homeassistant/components/hive/strings.json +++ b/homeassistant/components/hive/strings.json @@ -2,27 +2,27 @@ "config": { "step": { "user": { - "title": "Hive Login", + "title": "Hive login", "description": "Enter your Hive login information.", "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", - "scan_interval": "Scan Interval (seconds)" + "scan_interval": "Scan interval (seconds)" } }, "2fa": { - "title": "Hive Two-factor Authentication.", - "description": "Enter your Hive authentication code. \n \n Please enter code 0000 to request another code.", + "title": "Hive two-factor authentication", + "description": "Enter your Hive authentication code.\n\nPlease enter code 0000 to request another code.", "data": { "2fa": "Two-factor code" } }, "configuration": { + "title": "Hive configuration", + "description": "Enter your Hive configuration.", "data": { - "device_name": "Device Name" - }, - "description": "Enter your Hive configuration", - "title": "Hive Configuration." + "device_name": "Device name" + } }, "reauth": { "title": "[%key:component::hive::config::step::user::title%]", @@ -37,7 +37,7 @@ "invalid_username": "Failed to sign into Hive. Your email address is not recognised.", "invalid_password": "Failed to sign into Hive. Incorrect password, please try again.", "invalid_code": "Failed to sign into Hive. 
Your two-factor authentication code was incorrect.", - "no_internet_available": "An internet connection is required to connect to Hive.", + "no_internet_available": "An Internet connection is required to connect to Hive.", "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { diff --git a/homeassistant/components/hko/config_flow.py b/homeassistant/components/hko/config_flow.py index 8548bb4767d..1e2a6230455 100644 --- a/homeassistant/components/hko/config_flow.py +++ b/homeassistant/components/hko/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations from asyncio import timeout +import logging from typing import Any from hko import HKO, LOCATIONS, HKOError @@ -15,6 +16,8 @@ from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig from .const import API_RHRREAD, DEFAULT_LOCATION, DOMAIN, KEY_LOCATION +_LOGGER = logging.getLogger(__name__) + def get_loc_name(item): """Return an array of supported locations.""" @@ -54,7 +57,8 @@ class HKOConfigFlow(ConfigFlow, domain=DOMAIN): except HKOError: errors["base"] = "cannot_connect" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: await self.async_set_unique_id( diff --git a/homeassistant/components/holiday/manifest.json b/homeassistant/components/holiday/manifest.json index ec47b222370..4c73210c36e 100644 --- a/homeassistant/components/holiday/manifest.json +++ b/homeassistant/components/holiday/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/holiday", "iot_class": "local_polling", - "requirements": ["holidays==0.68", "babel==2.15.0"] + "requirements": ["holidays==0.69", "babel==2.15.0"] } diff --git a/homeassistant/components/home_connect/__init__.py b/homeassistant/components/home_connect/__init__.py index 3e1bd1da156..fe01a3e9564 100644 --- a/homeassistant/components/home_connect/__init__.py +++ b/homeassistant/components/home_connect/__init__.py @@ -2,189 +2,29 @@ from __future__ import annotations -from collections.abc import Awaitable import logging -from typing import Any, cast +from typing import Any from aiohomeconnect.client import Client as HomeConnectClient -from aiohomeconnect.model import ( - ArrayOfOptions, - CommandKey, - Option, - OptionKey, - ProgramKey, - SettingKey, -) -from aiohomeconnect.model.error import HomeConnectError -import voluptuous as vol +import aiohttp -from homeassistant.const import ATTR_DEVICE_ID, Platform -from homeassistant.core import HomeAssistant, ServiceCall, callback -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError -from homeassistant.helpers import ( - config_entry_oauth2_flow, - config_validation as cv, - device_registry as dr, -) +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady +from homeassistant.helpers import config_entry_oauth2_flow, config_validation as cv from homeassistant.helpers.entity_registry import RegistryEntry, async_migrate_entries -from homeassistant.helpers.issue_registry import ( - IssueSeverity, - async_create_issue, - async_delete_issue, -) +from homeassistant.helpers.issue_registry import async_delete_issue from homeassistant.helpers.typing import ConfigType from .api import AsyncConfigEntryAuth -from .const import ( - AFFECTS_TO_ACTIVE_PROGRAM, - AFFECTS_TO_SELECTED_PROGRAM, - ATTR_AFFECTS_TO, - ATTR_KEY, - ATTR_PROGRAM, - 
ATTR_UNIT, - ATTR_VALUE, - DOMAIN, - OLD_NEW_UNIQUE_ID_SUFFIX_MAP, - PROGRAM_ENUM_OPTIONS, - SERVICE_OPTION_ACTIVE, - SERVICE_OPTION_SELECTED, - SERVICE_PAUSE_PROGRAM, - SERVICE_RESUME_PROGRAM, - SERVICE_SELECT_PROGRAM, - SERVICE_SET_PROGRAM_AND_OPTIONS, - SERVICE_SETTING, - SERVICE_START_PROGRAM, - SVE_TRANSLATION_PLACEHOLDER_KEY, - SVE_TRANSLATION_PLACEHOLDER_PROGRAM, - SVE_TRANSLATION_PLACEHOLDER_VALUE, - TRANSLATION_KEYS_PROGRAMS_MAP, -) +from .const import DOMAIN, OLD_NEW_UNIQUE_ID_SUFFIX_MAP from .coordinator import HomeConnectConfigEntry, HomeConnectCoordinator -from .utils import bsh_key_to_translation_key, get_dict_from_home_connect_error +from .services import register_actions _LOGGER = logging.getLogger(__name__) CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) - -PROGRAM_OPTIONS = { - bsh_key_to_translation_key(key): ( - key, - value, - ) - for key, value in { - OptionKey.BSH_COMMON_DURATION: int, - OptionKey.BSH_COMMON_START_IN_RELATIVE: int, - OptionKey.BSH_COMMON_FINISH_IN_RELATIVE: int, - OptionKey.CONSUMER_PRODUCTS_COFFEE_MAKER_FILL_QUANTITY: int, - OptionKey.CONSUMER_PRODUCTS_COFFEE_MAKER_MULTIPLE_BEVERAGES: bool, - OptionKey.DISHCARE_DISHWASHER_INTENSIV_ZONE: bool, - OptionKey.DISHCARE_DISHWASHER_BRILLIANCE_DRY: bool, - OptionKey.DISHCARE_DISHWASHER_VARIO_SPEED_PLUS: bool, - OptionKey.DISHCARE_DISHWASHER_SILENCE_ON_DEMAND: bool, - OptionKey.DISHCARE_DISHWASHER_HALF_LOAD: bool, - OptionKey.DISHCARE_DISHWASHER_EXTRA_DRY: bool, - OptionKey.DISHCARE_DISHWASHER_HYGIENE_PLUS: bool, - OptionKey.DISHCARE_DISHWASHER_ECO_DRY: bool, - OptionKey.DISHCARE_DISHWASHER_ZEOLITE_DRY: bool, - OptionKey.COOKING_OVEN_SETPOINT_TEMPERATURE: int, - OptionKey.COOKING_OVEN_FAST_PRE_HEAT: bool, - OptionKey.LAUNDRY_CARE_WASHER_I_DOS_1_ACTIVE: bool, - OptionKey.LAUNDRY_CARE_WASHER_I_DOS_2_ACTIVE: bool, - }.items() -} - - -SERVICE_SETTING_SCHEMA = vol.Schema( - { - vol.Required(ATTR_DEVICE_ID): str, - vol.Required(ATTR_KEY): vol.All( - vol.Coerce(SettingKey), - vol.NotIn([SettingKey.UNKNOWN]), - ), - vol.Required(ATTR_VALUE): vol.Any(str, int, bool), - } -) - -# DEPRECATED: Remove in 2025.9.0 -SERVICE_OPTION_SCHEMA = vol.Schema( - { - vol.Required(ATTR_DEVICE_ID): str, - vol.Required(ATTR_KEY): vol.All( - vol.Coerce(OptionKey), - vol.NotIn([OptionKey.UNKNOWN]), - ), - vol.Required(ATTR_VALUE): vol.Any(str, int, bool), - vol.Optional(ATTR_UNIT): str, - } -) - -# DEPRECATED: Remove in 2025.9.0 -SERVICE_PROGRAM_SCHEMA = vol.Any( - { - vol.Required(ATTR_DEVICE_ID): str, - vol.Required(ATTR_PROGRAM): vol.All( - vol.Coerce(ProgramKey), - vol.NotIn([ProgramKey.UNKNOWN]), - ), - vol.Required(ATTR_KEY): vol.All( - vol.Coerce(OptionKey), - vol.NotIn([OptionKey.UNKNOWN]), - ), - vol.Required(ATTR_VALUE): vol.Any(int, str), - vol.Optional(ATTR_UNIT): str, - }, - { - vol.Required(ATTR_DEVICE_ID): str, - vol.Required(ATTR_PROGRAM): vol.All( - vol.Coerce(ProgramKey), - vol.NotIn([ProgramKey.UNKNOWN]), - ), - }, -) - - -def _require_program_or_at_least_one_option(data: dict) -> dict: - if ATTR_PROGRAM not in data and not any( - option_key in data for option_key in (PROGRAM_ENUM_OPTIONS | PROGRAM_OPTIONS) - ): - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="required_program_or_one_option_at_least", - ) - return data - - -SERVICE_PROGRAM_AND_OPTIONS_SCHEMA = vol.All( - vol.Schema( - { - vol.Required(ATTR_DEVICE_ID): str, - vol.Required(ATTR_AFFECTS_TO): vol.In( - [AFFECTS_TO_ACTIVE_PROGRAM, AFFECTS_TO_SELECTED_PROGRAM] - ), - vol.Optional(ATTR_PROGRAM): 
vol.In(TRANSLATION_KEYS_PROGRAMS_MAP.keys()), - } - ) - .extend( - { - vol.Optional(translation_key): vol.In(allowed_values.keys()) - for translation_key, ( - key, - allowed_values, - ) in PROGRAM_ENUM_OPTIONS.items() - } - ) - .extend( - { - vol.Optional(translation_key): schema - for translation_key, (key, schema) in PROGRAM_OPTIONS.items() - } - ), - _require_program_or_at_least_one_option, -) - -SERVICE_COMMAND_SCHEMA = vol.Schema({vol.Required(ATTR_DEVICE_ID): str}) - PLATFORMS = [ Platform.BINARY_SENSOR, Platform.BUTTON, @@ -197,406 +37,9 @@ PLATFORMS = [ ] -async def _get_client_and_ha_id( - hass: HomeAssistant, device_id: str -) -> tuple[HomeConnectClient, str]: - device_registry = dr.async_get(hass) - device_entry = device_registry.async_get(device_id) - if device_entry is None: - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="device_entry_not_found", - translation_placeholders={ - "device_id": device_id, - }, - ) - entry: HomeConnectConfigEntry | None = None - for entry_id in device_entry.config_entries: - _entry = hass.config_entries.async_get_entry(entry_id) - assert _entry - if _entry.domain == DOMAIN: - entry = cast(HomeConnectConfigEntry, _entry) - break - if entry is None: - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="config_entry_not_found", - translation_placeholders={ - "device_id": device_id, - }, - ) - - ha_id = next( - ( - identifier[1] - for identifier in device_entry.identifiers - if identifier[0] == DOMAIN - ), - None, - ) - if ha_id is None: - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="appliance_not_found", - translation_placeholders={ - "device_id": device_id, - }, - ) - return entry.runtime_data.client, ha_id - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: C901 +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up Home Connect component.""" - - async def _async_service_program(call: ServiceCall, start: bool) -> None: - """Execute calls to services taking a program.""" - program = call.data[ATTR_PROGRAM] - client, ha_id = await _get_client_and_ha_id(hass, call.data[ATTR_DEVICE_ID]) - - option_key = call.data.get(ATTR_KEY) - options = ( - [ - Option( - option_key, - call.data[ATTR_VALUE], - unit=call.data.get(ATTR_UNIT), - ) - ] - if option_key is not None - else None - ) - - async_create_issue( - hass, - DOMAIN, - "deprecated_set_program_and_option_actions", - breaks_in_ha_version="2025.9.0", - is_fixable=True, - is_persistent=True, - severity=IssueSeverity.WARNING, - translation_key="deprecated_set_program_and_option_actions", - translation_placeholders={ - "new_action_key": SERVICE_SET_PROGRAM_AND_OPTIONS, - "remove_release": "2025.9.0", - "deprecated_action_yaml": "\n".join( - [ - "```yaml", - f"action: {DOMAIN}.{SERVICE_START_PROGRAM if start else SERVICE_SELECT_PROGRAM}", - "data:", - f" {ATTR_DEVICE_ID}: DEVICE_ID", - f" {ATTR_PROGRAM}: {program}", - *([f" {ATTR_KEY}: {options[0].key}"] if options else []), - *([f" {ATTR_VALUE}: {options[0].value}"] if options else []), - *( - [f" {ATTR_UNIT}: {options[0].unit}"] - if options and options[0].unit - else [] - ), - "```", - ] - ), - "new_action_yaml": "\n ".join( - [ - "```yaml", - f"action: {DOMAIN}.{SERVICE_SET_PROGRAM_AND_OPTIONS}", - "data:", - f" {ATTR_DEVICE_ID}: DEVICE_ID", - f" {ATTR_AFFECTS_TO}: {AFFECTS_TO_ACTIVE_PROGRAM if start else AFFECTS_TO_SELECTED_PROGRAM}", - f" {ATTR_PROGRAM}: {bsh_key_to_translation_key(program.value)}", - *( - 
[ - f" {bsh_key_to_translation_key(options[0].key)}: {options[0].value}" - ] - if options - else [] - ), - "```", - ] - ), - "repo_link": "[aiohomeconnect](https://github.com/MartinHjelmare/aiohomeconnect)", - }, - ) - - try: - if start: - await client.start_program(ha_id, program_key=program, options=options) - else: - await client.set_selected_program( - ha_id, program_key=program, options=options - ) - except HomeConnectError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="start_program" if start else "select_program", - translation_placeholders={ - **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_PROGRAM: program, - }, - ) from err - - async def _async_service_set_program_options( - call: ServiceCall, active: bool - ) -> None: - """Execute calls to services taking a program.""" - option_key = call.data[ATTR_KEY] - value = call.data[ATTR_VALUE] - unit = call.data.get(ATTR_UNIT) - client, ha_id = await _get_client_and_ha_id(hass, call.data[ATTR_DEVICE_ID]) - - async_create_issue( - hass, - DOMAIN, - "deprecated_set_program_and_option_actions", - breaks_in_ha_version="2025.9.0", - is_fixable=True, - is_persistent=True, - severity=IssueSeverity.WARNING, - translation_key="deprecated_set_program_and_option_actions", - translation_placeholders={ - "new_action_key": SERVICE_SET_PROGRAM_AND_OPTIONS, - "remove_release": "2025.9.0", - "deprecated_action_yaml": "\n".join( - [ - "```yaml", - f"action: {DOMAIN}.{SERVICE_OPTION_ACTIVE if active else SERVICE_OPTION_SELECTED}", - "data:", - f" {ATTR_DEVICE_ID}: DEVICE_ID", - f" {ATTR_KEY}: {option_key}", - f" {ATTR_VALUE}: {value}", - *([f" {ATTR_UNIT}: {unit}"] if unit else []), - "```", - ] - ), - "new_action_yaml": "\n ".join( - [ - "```yaml", - f"action: {DOMAIN}.{SERVICE_SET_PROGRAM_AND_OPTIONS}", - "data:", - f" {ATTR_DEVICE_ID}: DEVICE_ID", - f" {ATTR_AFFECTS_TO}: {AFFECTS_TO_ACTIVE_PROGRAM if active else AFFECTS_TO_SELECTED_PROGRAM}", - f" {bsh_key_to_translation_key(option_key)}: {value}", - "```", - ] - ), - "repo_link": "[aiohomeconnect](https://github.com/MartinHjelmare/aiohomeconnect)", - }, - ) - try: - if active: - await client.set_active_program_option( - ha_id, - option_key=option_key, - value=value, - unit=unit, - ) - else: - await client.set_selected_program_option( - ha_id, - option_key=option_key, - value=value, - unit=unit, - ) - except HomeConnectError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="set_options_active_program" - if active - else "set_options_selected_program", - translation_placeholders={ - **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_KEY: option_key, - SVE_TRANSLATION_PLACEHOLDER_VALUE: str(value), - }, - ) from err - - async def _async_service_command( - call: ServiceCall, command_key: CommandKey - ) -> None: - """Execute calls to services executing a command.""" - client, ha_id = await _get_client_and_ha_id(hass, call.data[ATTR_DEVICE_ID]) - - async_create_issue( - hass, - DOMAIN, - "deprecated_command_actions", - breaks_in_ha_version="2025.9.0", - is_fixable=True, - is_persistent=True, - severity=IssueSeverity.WARNING, - translation_key="deprecated_command_actions", - ) - - try: - await client.put_command(ha_id, command_key=command_key, value=True) - except HomeConnectError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="execute_command", - translation_placeholders={ - **get_dict_from_home_connect_error(err), - "command": command_key.value, - }, - ) from 
err - - async def async_service_option_active(call: ServiceCall) -> None: - """Service for setting an option for an active program.""" - await _async_service_set_program_options(call, True) - - async def async_service_option_selected(call: ServiceCall) -> None: - """Service for setting an option for a selected program.""" - await _async_service_set_program_options(call, False) - - async def async_service_setting(call: ServiceCall) -> None: - """Service for changing a setting.""" - key = call.data[ATTR_KEY] - value = call.data[ATTR_VALUE] - client, ha_id = await _get_client_and_ha_id(hass, call.data[ATTR_DEVICE_ID]) - - try: - await client.set_setting(ha_id, setting_key=key, value=value) - except HomeConnectError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="set_setting", - translation_placeholders={ - **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_KEY: key, - SVE_TRANSLATION_PLACEHOLDER_VALUE: str(value), - }, - ) from err - - async def async_service_pause_program(call: ServiceCall) -> None: - """Service for pausing a program.""" - await _async_service_command(call, CommandKey.BSH_COMMON_PAUSE_PROGRAM) - - async def async_service_resume_program(call: ServiceCall) -> None: - """Service for resuming a paused program.""" - await _async_service_command(call, CommandKey.BSH_COMMON_RESUME_PROGRAM) - - async def async_service_select_program(call: ServiceCall) -> None: - """Service for selecting a program.""" - await _async_service_program(call, False) - - async def async_service_set_program_and_options(call: ServiceCall) -> None: - """Service for setting a program and options.""" - data = dict(call.data) - program = data.pop(ATTR_PROGRAM, None) - affects_to = data.pop(ATTR_AFFECTS_TO) - client, ha_id = await _get_client_and_ha_id(hass, data.pop(ATTR_DEVICE_ID)) - - options: list[Option] = [] - - for option, value in data.items(): - if option in PROGRAM_ENUM_OPTIONS: - options.append( - Option( - PROGRAM_ENUM_OPTIONS[option][0], - PROGRAM_ENUM_OPTIONS[option][1][value], - ) - ) - elif option in PROGRAM_OPTIONS: - option_key = PROGRAM_OPTIONS[option][0] - options.append(Option(option_key, value)) - - method_call: Awaitable[Any] - exception_translation_key: str - if program: - program = ( - program - if isinstance(program, ProgramKey) - else TRANSLATION_KEYS_PROGRAMS_MAP[program] - ) - - if affects_to == AFFECTS_TO_ACTIVE_PROGRAM: - method_call = client.start_program( - ha_id, program_key=program, options=options - ) - exception_translation_key = "start_program" - elif affects_to == AFFECTS_TO_SELECTED_PROGRAM: - method_call = client.set_selected_program( - ha_id, program_key=program, options=options - ) - exception_translation_key = "select_program" - else: - array_of_options = ArrayOfOptions(options) - if affects_to == AFFECTS_TO_ACTIVE_PROGRAM: - method_call = client.set_active_program_options( - ha_id, array_of_options=array_of_options - ) - exception_translation_key = "set_options_active_program" - else: - # affects_to is AFFECTS_TO_SELECTED_PROGRAM - method_call = client.set_selected_program_options( - ha_id, array_of_options=array_of_options - ) - exception_translation_key = "set_options_selected_program" - - try: - await method_call - except HomeConnectError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key=exception_translation_key, - translation_placeholders={ - **get_dict_from_home_connect_error(err), - **( - {SVE_TRANSLATION_PLACEHOLDER_PROGRAM: program} - if program - else {} - ), - }, - ) 
from err - - async def async_service_start_program(call: ServiceCall) -> None: - """Service for starting a program.""" - await _async_service_program(call, True) - - hass.services.async_register( - DOMAIN, - SERVICE_OPTION_ACTIVE, - async_service_option_active, - schema=SERVICE_OPTION_SCHEMA, - ) - hass.services.async_register( - DOMAIN, - SERVICE_OPTION_SELECTED, - async_service_option_selected, - schema=SERVICE_OPTION_SCHEMA, - ) - hass.services.async_register( - DOMAIN, SERVICE_SETTING, async_service_setting, schema=SERVICE_SETTING_SCHEMA - ) - hass.services.async_register( - DOMAIN, - SERVICE_PAUSE_PROGRAM, - async_service_pause_program, - schema=SERVICE_COMMAND_SCHEMA, - ) - hass.services.async_register( - DOMAIN, - SERVICE_RESUME_PROGRAM, - async_service_resume_program, - schema=SERVICE_COMMAND_SCHEMA, - ) - hass.services.async_register( - DOMAIN, - SERVICE_SELECT_PROGRAM, - async_service_select_program, - schema=SERVICE_PROGRAM_SCHEMA, - ) - hass.services.async_register( - DOMAIN, - SERVICE_START_PROGRAM, - async_service_start_program, - schema=SERVICE_PROGRAM_SCHEMA, - ) - hass.services.async_register( - DOMAIN, - SERVICE_SET_PROGRAM_AND_OPTIONS, - async_service_set_program_and_options, - schema=SERVICE_PROGRAM_AND_OPTIONS_SCHEMA, - ) - + register_actions(hass) return True @@ -611,18 +54,31 @@ async def async_setup_entry(hass: HomeAssistant, entry: HomeConnectConfigEntry) session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation) config_entry_auth = AsyncConfigEntryAuth(hass, session) + try: + await config_entry_auth.async_get_access_token() + except aiohttp.ClientResponseError as err: + if 400 <= err.status < 500: + raise ConfigEntryAuthFailed from err + raise ConfigEntryNotReady from err + except aiohttp.ClientError as err: + raise ConfigEntryNotReady from err home_connect_client = HomeConnectClient(config_entry_auth) coordinator = HomeConnectCoordinator(hass, entry, home_connect_client) - await coordinator.async_config_entry_first_refresh() - + await coordinator.async_setup() entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) entry.runtime_data.start_event_listener() + entry.async_create_background_task( + hass, + coordinator.async_refresh(), + f"home_connect-initial-full-refresh-{entry.entry_id}", + ) + return True diff --git a/homeassistant/components/home_connect/binary_sensor.py b/homeassistant/components/home_connect/binary_sensor.py index 1f82aa71766..b7b7e50047e 100644 --- a/homeassistant/components/home_connect/binary_sensor.py +++ b/homeassistant/components/home_connect/binary_sensor.py @@ -106,8 +106,26 @@ BINARY_SENSORS = ( key=StatusKey.REFRIGERATION_COMMON_DOOR_CHILLER_COMMON, boolean_map=REFRIGERATION_DOOR_BOOLEAN_MAP, device_class=BinarySensorDeviceClass.DOOR, + translation_key="common_chiller_door", + ), + HomeConnectBinarySensorEntityDescription( + key=StatusKey.REFRIGERATION_COMMON_DOOR_CHILLER, + boolean_map=REFRIGERATION_DOOR_BOOLEAN_MAP, + device_class=BinarySensorDeviceClass.DOOR, translation_key="chiller_door", ), + HomeConnectBinarySensorEntityDescription( + key=StatusKey.REFRIGERATION_COMMON_DOOR_CHILLER_LEFT, + boolean_map=REFRIGERATION_DOOR_BOOLEAN_MAP, + device_class=BinarySensorDeviceClass.DOOR, + translation_key="left_chiller_door", + ), + HomeConnectBinarySensorEntityDescription( + key=StatusKey.REFRIGERATION_COMMON_DOOR_CHILLER_RIGHT, + boolean_map=REFRIGERATION_DOOR_BOOLEAN_MAP, + device_class=BinarySensorDeviceClass.DOOR, + translation_key="right_chiller_door", + ), 
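# Illustrative sketch (editor's addition): async_setup_entry above now
# validates the OAuth token before creating the client, turning 4xx responses
# into a reauth flow (ConfigEntryAuthFailed) and everything else into a retry
# (ConfigEntryNotReady). The helper below distills that branching; the "auth"
# argument stands in for AsyncConfigEntryAuth and its async_get_access_token()
# call, which is an assumption of this sketch.
from typing import Any

import aiohttp

from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady


async def ensure_valid_token(auth: Any) -> None:
    """Raise the matching config-entry exception when the token is unusable."""
    try:
        await auth.async_get_access_token()
    except aiohttp.ClientResponseError as err:
        if 400 <= err.status < 500:
            raise ConfigEntryAuthFailed from err  # credentials problem: reauth
        raise ConfigEntryNotReady from err  # server error: retry setup later
    except aiohttp.ClientError as err:
        raise ConfigEntryNotReady from err  # network problem: retry setup later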
HomeConnectBinarySensorEntityDescription( key=StatusKey.REFRIGERATION_COMMON_DOOR_FLEX_COMPARTMENT, boolean_map=REFRIGERATION_DOOR_BOOLEAN_MAP, diff --git a/homeassistant/components/home_connect/button.py b/homeassistant/components/home_connect/button.py index 0a5538ec588..0bd31c6b7c9 100644 --- a/homeassistant/components/home_connect/button.py +++ b/homeassistant/components/home_connect/button.py @@ -1,6 +1,6 @@ """Provides button entities for Home Connect.""" -from aiohomeconnect.model import CommandKey, EventKey +from aiohomeconnect.model import CommandKey from aiohomeconnect.model.error import HomeConnectError from homeassistant.components.button import ButtonEntity, ButtonEntityDescription @@ -94,15 +94,9 @@ class HomeConnectButtonEntity(HomeConnectEntity, ButtonEntity): super().__init__( coordinator, appliance, - # The entity is subscribed to the appliance connected event, - # but it will receive also the disconnected event - ButtonEntityDescription( - key=EventKey.BSH_COMMON_APPLIANCE_CONNECTED, - ), + desc, + (appliance.info.ha_id,), ) - self.entity_description = desc - self.appliance = appliance - self.unique_id = f"{appliance.info.ha_id}-{desc.key}" def update_native_value(self) -> None: """Set the value of the entity.""" diff --git a/homeassistant/components/home_connect/common.py b/homeassistant/components/home_connect/common.py index f52b59bc213..cd3fefad80c 100644 --- a/homeassistant/components/home_connect/common.py +++ b/homeassistant/components/home_connect/common.py @@ -137,41 +137,6 @@ def setup_home_connect_entry( defaultdict(list) ) - entities: list[HomeConnectEntity] = [] - for appliance in entry.runtime_data.data.values(): - entities_to_add = get_entities_for_appliance(entry, appliance) - if get_option_entities_for_appliance: - entities_to_add.extend(get_option_entities_for_appliance(entry, appliance)) - for event_key in ( - EventKey.BSH_COMMON_ROOT_ACTIVE_PROGRAM, - EventKey.BSH_COMMON_ROOT_SELECTED_PROGRAM, - ): - changed_options_listener_remove_callback = ( - entry.runtime_data.async_add_listener( - partial( - _create_option_entities, - entry, - appliance, - known_entity_unique_ids, - get_option_entities_for_appliance, - async_add_entities, - ), - (appliance.info.ha_id, event_key), - ) - ) - entry.async_on_unload(changed_options_listener_remove_callback) - changed_options_listener_remove_callbacks[appliance.info.ha_id].append( - changed_options_listener_remove_callback - ) - known_entity_unique_ids.update( - { - cast(str, entity.unique_id): appliance.info.ha_id - for entity in entities_to_add - } - ) - entities.extend(entities_to_add) - async_add_entities(entities) - entry.async_on_unload( entry.runtime_data.async_add_special_listener( partial( diff --git a/homeassistant/components/home_connect/const.py b/homeassistant/components/home_connect/const.py index 66c635f5d95..64bf4af29a4 100644 --- a/homeassistant/components/home_connect/const.py +++ b/homeassistant/components/home_connect/const.py @@ -10,6 +10,7 @@ from .utils import bsh_key_to_translation_key DOMAIN = "home_connect" +API_DEFAULT_RETRY_AFTER = 60 APPLIANCES_WITH_PROGRAMS = ( "CleaningRobot", @@ -78,13 +79,6 @@ ATTR_VALUE = "value" AFFECTS_TO_ACTIVE_PROGRAM = "active_program" AFFECTS_TO_SELECTED_PROGRAM = "selected_program" -SVE_TRANSLATION_KEY_SET_SETTING = "set_setting_entity" -SVE_TRANSLATION_PLACEHOLDER_APPLIANCE_NAME = "appliance_name" -SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID = "entity_id" -SVE_TRANSLATION_PLACEHOLDER_PROGRAM = "program" -SVE_TRANSLATION_PLACEHOLDER_KEY = "key" 
-SVE_TRANSLATION_PLACEHOLDER_VALUE = "value" - TRANSLATION_KEYS_PROGRAMS_MAP = { bsh_key_to_translation_key(program.value): cast(ProgramKey, program) @@ -284,7 +278,9 @@ SPIN_SPEED_OPTIONS = { "LaundryCare.Washer.EnumType.SpinSpeed.Off", "LaundryCare.Washer.EnumType.SpinSpeed.RPM400", "LaundryCare.Washer.EnumType.SpinSpeed.RPM600", + "LaundryCare.Washer.EnumType.SpinSpeed.RPM700", "LaundryCare.Washer.EnumType.SpinSpeed.RPM800", + "LaundryCare.Washer.EnumType.SpinSpeed.RPM900", "LaundryCare.Washer.EnumType.SpinSpeed.RPM1000", "LaundryCare.Washer.EnumType.SpinSpeed.RPM1200", "LaundryCare.Washer.EnumType.SpinSpeed.RPM1400", diff --git a/homeassistant/components/home_connect/coordinator.py b/homeassistant/components/home_connect/coordinator.py index dfac68084d1..079db6b148e 100644 --- a/homeassistant/components/home_connect/coordinator.py +++ b/homeassistant/components/home_connect/coordinator.py @@ -2,7 +2,7 @@ from __future__ import annotations -import asyncio +from asyncio import sleep as asyncio_sleep from collections import defaultdict from collections.abc import Callable from dataclasses import dataclass @@ -29,6 +29,7 @@ from aiohomeconnect.model.error import ( HomeConnectApiError, HomeConnectError, HomeConnectRequestError, + TooManyRequestsError, UnauthorizedError, ) from aiohomeconnect.model.program import EnumerateProgram, ProgramDefinitionOption @@ -36,11 +37,11 @@ from propcache.api import cached_property from homeassistant.config_entries import ConfigEntry from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback -from homeassistant.exceptions import ConfigEntryAuthFailed +from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers import device_registry as dr from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import APPLIANCES_WITH_PROGRAMS, DOMAIN +from .const import API_DEFAULT_RETRY_AFTER, APPLIANCES_WITH_PROGRAMS, DOMAIN from .utils import get_dict_from_home_connect_error _LOGGER = logging.getLogger(__name__) @@ -231,15 +232,15 @@ class HomeConnectCoordinator( self.data[event_message_ha_id].update(appliance_data) else: self.data[event_message_ha_id] = appliance_data - for listener, context in list( - self._special_listeners.values() - ) + list(self._listeners.values()): - assert isinstance(context, tuple) + for listener, context in self._special_listeners.values(): if ( EventKey.BSH_COMMON_APPLIANCE_DEPAIRED not in context ): listener() + self._call_all_event_listeners_for_appliance( + event_message_ha_id + ) case EventType.DISCONNECTED: self.data[event_message_ha_id].info.connected = False @@ -269,7 +270,7 @@ class HomeConnectCoordinator( error, retry_time, ) - await asyncio.sleep(retry_time) + await asyncio_sleep(retry_time) retry_time = min(retry_time * 2, 3600) except HomeConnectApiError as error: _LOGGER.error("Error while listening for events: %s", error) @@ -295,6 +296,42 @@ class HomeConnectCoordinator( async def _async_update_data(self) -> dict[str, HomeConnectApplianceData]: """Fetch data from Home Connect.""" + await self._async_setup() + + for appliance_data in self.data.values(): + appliance = appliance_data.info + ha_id = appliance.ha_id + while True: + try: + self.data[ha_id] = await self._get_appliance_data( + appliance, self.data.get(ha_id) + ) + except TooManyRequestsError as err: + _LOGGER.debug( + "Rate limit exceeded on initial fetch: %s", + err, + ) + await asyncio_sleep(err.retry_after or API_DEFAULT_RETRY_AFTER) + else: + break + + for 
listener, context in self._special_listeners.values(): + assert isinstance(context, tuple) + if EventKey.BSH_COMMON_APPLIANCE_PAIRED in context: + listener() + + return self.data + + async def async_setup(self) -> None: + """Set up the devices.""" + try: + await self._async_setup() + except UpdateFailed as err: + raise ConfigEntryNotReady from err + + async def _async_setup(self) -> None: + """Set up the devices.""" + old_appliances = set(self.data.keys()) try: appliances = await self.client.get_home_appliances() except UnauthorizedError as error: @@ -312,12 +349,45 @@ class HomeConnectCoordinator( translation_placeholders=get_dict_from_home_connect_error(error), ) from error - return { - appliance.ha_id: await self._get_appliance_data( - appliance, self.data.get(appliance.ha_id) + for appliance in appliances.homeappliances: + self.device_registry.async_get_or_create( + config_entry_id=self.config_entry.entry_id, + identifiers={(DOMAIN, appliance.ha_id)}, + manufacturer=appliance.brand, + name=appliance.name, + model=appliance.vib, ) - for appliance in appliances.homeappliances - } + if appliance.ha_id not in self.data: + self.data[appliance.ha_id] = HomeConnectApplianceData( + commands=set(), + events={}, + info=appliance, + options={}, + programs=[], + settings={}, + status={}, + ) + else: + self.data[appliance.ha_id].info.connected = appliance.connected + old_appliances.remove(appliance.ha_id) + + for ha_id in old_appliances: + self.data.pop(ha_id, None) + device = self.device_registry.async_get_device( + identifiers={(DOMAIN, ha_id)} + ) + if device: + self.device_registry.async_update_device( + device_id=device.id, + remove_config_entry_id=self.config_entry.entry_id, + ) + + # Trigger to delete the possible depaired device entities + # from known_entities variable at common.py + for listener, context in self._special_listeners.values(): + assert isinstance(context, tuple) + if EventKey.BSH_COMMON_APPLIANCE_DEPAIRED in context: + listener() async def _get_appliance_data( self, @@ -339,6 +409,8 @@ class HomeConnectCoordinator( await self.client.get_settings(appliance.ha_id) ).settings } + except TooManyRequestsError: + raise except HomeConnectError as error: _LOGGER.debug( "Error fetching settings for %s: %s", @@ -351,6 +423,8 @@ class HomeConnectCoordinator( status.key: status for status in (await self.client.get_status(appliance.ha_id)).status } + except TooManyRequestsError: + raise except HomeConnectError as error: _LOGGER.debug( "Error fetching status for %s: %s", @@ -365,6 +439,8 @@ class HomeConnectCoordinator( if appliance.type in APPLIANCES_WITH_PROGRAMS: try: all_programs = await self.client.get_all_programs(appliance.ha_id) + except TooManyRequestsError: + raise except HomeConnectError as error: _LOGGER.debug( "Error fetching programs for %s: %s", @@ -421,6 +497,8 @@ class HomeConnectCoordinator( await self.client.get_available_commands(appliance.ha_id) ).commands } + except TooManyRequestsError: + raise except HomeConnectError: commands = set() @@ -455,6 +533,8 @@ class HomeConnectCoordinator( ).options or [] } + except TooManyRequestsError: + raise except HomeConnectError as error: _LOGGER.debug( "Error fetching options for %s: %s", diff --git a/homeassistant/components/home_connect/entity.py b/homeassistant/components/home_connect/entity.py index b55ff374f34..facb3b14a9b 100644 --- a/homeassistant/components/home_connect/entity.py +++ b/homeassistant/components/home_connect/entity.py @@ -1,21 +1,28 @@ """Home Connect entity base class.""" from abc import abstractmethod 
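# Illustrative sketch (editor's addition): the coordinator's first full fetch
# above retries each appliance whenever the API answers with
# TooManyRequestsError, sleeping for the server-provided retry_after (or the
# 60-second API_DEFAULT_RETRY_AFTER from const.py) before trying again. The
# generic helper below isolates that loop; fetch() stands in for
# self._get_appliance_data(...) and is an assumption of this sketch.
from asyncio import sleep as asyncio_sleep
from collections.abc import Awaitable, Callable

from aiohomeconnect.model.error import TooManyRequestsError

API_DEFAULT_RETRY_AFTER = 60


async def fetch_with_backoff[T](fetch: Callable[[], Awaitable[T]]) -> T:
    """Keep calling fetch() until it is no longer rate limited."""
    while True:
        try:
            return await fetch()
        except TooManyRequestsError as err:
            await asyncio_sleep(err.retry_after or API_DEFAULT_RETRY_AFTER)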
+from collections.abc import Callable, Coroutine import contextlib +from datetime import datetime import logging -from typing import cast +from typing import Any, Concatenate, cast from aiohomeconnect.model import EventKey, OptionKey -from aiohomeconnect.model.error import ActiveProgramNotSetError, HomeConnectError +from aiohomeconnect.model.error import ( + ActiveProgramNotSetError, + HomeConnectError, + TooManyRequestsError, +) from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.event import async_call_later from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN +from .const import API_DEFAULT_RETRY_AFTER, DOMAIN from .coordinator import HomeConnectApplianceData, HomeConnectCoordinator from .utils import get_dict_from_home_connect_error @@ -33,9 +40,13 @@ class HomeConnectEntity(CoordinatorEntity[HomeConnectCoordinator]): coordinator: HomeConnectCoordinator, appliance: HomeConnectApplianceData, desc: EntityDescription, + context_override: Any | None = None, ) -> None: """Initialize the entity.""" - super().__init__(coordinator, (appliance.info.ha_id, EventKey(desc.key))) + context = (appliance.info.ha_id, EventKey(desc.key)) + if context_override is not None: + context = context_override + super().__init__(coordinator, context) self.appliance = appliance self.entity_description = desc self._attr_unique_id = f"{appliance.info.ha_id}-{desc.key}" @@ -127,3 +138,34 @@ class HomeConnectOptionEntity(HomeConnectEntity): def bsh_key(self) -> OptionKey: """Return the BSH key.""" return cast(OptionKey, self.entity_description.key) + + +def constraint_fetcher[_EntityT: HomeConnectEntity, **_P]( + func: Callable[Concatenate[_EntityT, _P], Coroutine[Any, Any, Any]], +) -> Callable[Concatenate[_EntityT, _P], Coroutine[Any, Any, None]]: + """Decorate the function to catch Home Connect too many requests error and retry later. 
+ + If it needs to be called later, it will call async_write_ha_state function + """ + + async def handler_to_return( + self: _EntityT, *args: _P.args, **kwargs: _P.kwargs + ) -> None: + async def handler(_datetime: datetime | None = None) -> None: + try: + await func(self, *args, **kwargs) + except TooManyRequestsError as err: + if (retry_after := err.retry_after) is None: + retry_after = API_DEFAULT_RETRY_AFTER + async_call_later(self.hass, retry_after, handler) + except HomeConnectError as err: + _LOGGER.error( + "Error fetching constraints for %s: %s", self.entity_id, err + ) + else: + if _datetime is not None: + self.async_write_ha_state() + + await handler() + + return handler_to_return diff --git a/homeassistant/components/home_connect/icons.json b/homeassistant/components/home_connect/icons.json index f781db3ab24..9b4c9276998 100644 --- a/homeassistant/components/home_connect/icons.json +++ b/homeassistant/components/home_connect/icons.json @@ -113,7 +113,7 @@ "milk_counter": { "default": "mdi:cup" }, - "coffee_and_milk": { + "coffee_and_milk_counter": { "default": "mdi:coffee" }, "ristretto_espresso_counter": { diff --git a/homeassistant/components/home_connect/light.py b/homeassistant/components/home_connect/light.py index 72c6b9aaa2b..de55a60bd43 100644 --- a/homeassistant/components/home_connect/light.py +++ b/homeassistant/components/home_connect/light.py @@ -21,11 +21,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.util import color as color_util from .common import setup_home_connect_entry -from .const import ( - BSH_AMBIENT_LIGHT_COLOR_CUSTOM_COLOR, - DOMAIN, - SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID, -) +from .const import BSH_AMBIENT_LIGHT_COLOR_CUSTOM_COLOR, DOMAIN from .coordinator import ( HomeConnectApplianceData, HomeConnectConfigEntry, @@ -164,7 +160,7 @@ class HomeConnectLight(HomeConnectEntity, LightEntity): translation_key="turn_on_light", translation_placeholders={ **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, + "entity_id": self.entity_id, }, ) from err if self._color_key and self._custom_color_key: @@ -183,7 +179,7 @@ class HomeConnectLight(HomeConnectEntity, LightEntity): translation_key="select_light_custom_color", translation_placeholders={ **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, + "entity_id": self.entity_id, }, ) from err @@ -201,7 +197,7 @@ class HomeConnectLight(HomeConnectEntity, LightEntity): translation_key="set_light_color", translation_placeholders={ **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, + "entity_id": self.entity_id, }, ) from err return @@ -211,11 +207,13 @@ class HomeConnectLight(HomeConnectEntity, LightEntity): brightness = round( color_util.brightness_to_value( self._brightness_scale, - kwargs.get(ATTR_BRIGHTNESS, self._attr_brightness), + cast(int, kwargs.get(ATTR_BRIGHTNESS, self._attr_brightness)), ) ) - hs_color = kwargs.get(ATTR_HS_COLOR, self._attr_hs_color) + hs_color = cast( + tuple[float, float], kwargs.get(ATTR_HS_COLOR, self._attr_hs_color) + ) rgb = color_util.color_hsv_to_RGB(hs_color[0], hs_color[1], brightness) hex_val = color_util.color_rgb_to_hex(*rgb) @@ -231,7 +229,7 @@ class HomeConnectLight(HomeConnectEntity, LightEntity): translation_key="set_light_color", translation_placeholders={ **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, + "entity_id": 
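# Illustrative sketch (editor's addition): constraint_fetcher above handles the
# same rate limit without blocking an entity's setup, by scheduling another
# attempt with async_call_later instead of sleeping (the integration's version
# also writes entity state after a deferred retry succeeds; that part is
# omitted here). refresh() stands in for the decorated fetch function and is an
# assumption of this sketch.
from collections.abc import Awaitable, Callable
from datetime import datetime

from aiohomeconnect.model.error import TooManyRequestsError

from homeassistant.core import HomeAssistant
from homeassistant.helpers.event import async_call_later

RETRY_AFTER_FALLBACK = 60


async def fetch_or_retry_later(
    hass: HomeAssistant, refresh: Callable[[], Awaitable[None]]
) -> None:
    """Run refresh(); when rate limited, schedule another attempt and return."""

    async def _attempt(_now: datetime | None = None) -> None:
        try:
            await refresh()
        except TooManyRequestsError as err:
            async_call_later(hass, err.retry_after or RETRY_AFTER_FALLBACK, _attempt)

    await _attempt()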
self.entity_id, }, ) from err return @@ -254,7 +252,7 @@ class HomeConnectLight(HomeConnectEntity, LightEntity): translation_key="set_light_brightness", translation_placeholders={ **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, + "entity_id": self.entity_id, }, ) from err @@ -272,7 +270,7 @@ class HomeConnectLight(HomeConnectEntity, LightEntity): translation_key="turn_off_light", translation_placeholders={ **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, + "entity_id": self.entity_id, }, ) from err diff --git a/homeassistant/components/home_connect/number.py b/homeassistant/components/home_connect/number.py index cef35005b32..1bb793f4015 100644 --- a/homeassistant/components/home_connect/number.py +++ b/homeassistant/components/home_connect/number.py @@ -1,4 +1,4 @@ -"""Provides number enties for Home Connect.""" +"""Provides number entities for Home Connect.""" import logging from typing import cast @@ -16,16 +16,9 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from .common import setup_home_connect_entry -from .const import ( - DOMAIN, - SVE_TRANSLATION_KEY_SET_SETTING, - SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID, - SVE_TRANSLATION_PLACEHOLDER_KEY, - SVE_TRANSLATION_PLACEHOLDER_VALUE, - UNIT_MAP, -) +from .const import DOMAIN, UNIT_MAP from .coordinator import HomeConnectApplianceData, HomeConnectConfigEntry -from .entity import HomeConnectEntity, HomeConnectOptionEntity +from .entity import HomeConnectEntity, HomeConnectOptionEntity, constraint_fetcher from .utils import get_dict_from_home_connect_error _LOGGER = logging.getLogger(__name__) @@ -33,6 +26,11 @@ _LOGGER = logging.getLogger(__name__) PARALLEL_UPDATES = 1 NUMBERS = ( + NumberEntityDescription( + key=SettingKey.BSH_COMMON_ALARM_CLOCK, + device_class=NumberDeviceClass.DURATION, + translation_key="alarm_clock", + ), NumberEntityDescription( key=SettingKey.REFRIGERATION_FRIDGE_FREEZER_SETPOINT_TEMPERATURE_REFRIGERATOR, device_class=NumberDeviceClass.TEMPERATURE, @@ -180,28 +178,34 @@ class HomeConnectNumberEntity(HomeConnectEntity, NumberEntity): except HomeConnectError as err: raise HomeAssistantError( translation_domain=DOMAIN, - translation_key=SVE_TRANSLATION_KEY_SET_SETTING, + translation_key="set_setting_entity", translation_placeholders={ **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, - SVE_TRANSLATION_PLACEHOLDER_KEY: self.bsh_key, - SVE_TRANSLATION_PLACEHOLDER_VALUE: str(value), + "entity_id": self.entity_id, + "key": self.bsh_key, + "value": str(value), }, ) from err + @constraint_fetcher async def async_fetch_constraints(self) -> None: """Fetch the max and min values and step for the number entity.""" - try: + setting_key = cast(SettingKey, self.bsh_key) + data = self.appliance.settings.get(setting_key) + if not data or not data.unit or not data.constraints: data = await self.coordinator.client.get_setting( - self.appliance.info.ha_id, setting_key=SettingKey(self.bsh_key) + self.appliance.info.ha_id, setting_key=setting_key ) - except HomeConnectError as err: - _LOGGER.error("An error occurred: %s", err) - else: + if data.unit: + self._attr_native_unit_of_measurement = data.unit self.set_constraints(data) def set_constraints(self, setting: GetSetting) -> None: """Set constraints for the number entity.""" + if setting.unit: + self._attr_native_unit_of_measurement = UNIT_MAP.get( + 
setting.unit, setting.unit + ) if not (constraints := setting.constraints): return if constraints.max: @@ -222,10 +226,10 @@ class HomeConnectNumberEntity(HomeConnectEntity, NumberEntity): """When entity is added to hass.""" await super().async_added_to_hass() data = self.appliance.settings[cast(SettingKey, self.bsh_key)] - self._attr_native_unit_of_measurement = data.unit self.set_constraints(data) if ( - not hasattr(self, "_attr_native_min_value") + not hasattr(self, "_attr_native_unit_of_measurement") + or not hasattr(self, "_attr_native_min_value") or not hasattr(self, "_attr_native_max_value") or not hasattr(self, "_attr_native_step") ): @@ -253,7 +257,6 @@ class HomeConnectOptionNumberEntity(HomeConnectOptionEntity, NumberEntity): or candidate_unit != self._attr_native_unit_of_measurement ): self._attr_native_unit_of_measurement = candidate_unit - self.__dict__.pop("unit_of_measurement", None) option_constraints = option_definition.constraints if option_constraints: if ( diff --git a/homeassistant/components/home_connect/select.py b/homeassistant/components/home_connect/select.py index ef3e2ccbf82..c82e0686cb5 100644 --- a/homeassistant/components/home_connect/select.py +++ b/homeassistant/components/home_connect/select.py @@ -1,8 +1,8 @@ """Provides a select platform for Home Connect.""" from collections.abc import Callable, Coroutine -import contextlib from dataclasses import dataclass +import logging from typing import Any, cast from aiohomeconnect.client import Client as HomeConnectClient @@ -31,11 +31,6 @@ from .const import ( INTENSIVE_LEVEL_OPTIONS, PROGRAMS_TRANSLATION_KEYS_MAP, SPIN_SPEED_OPTIONS, - SVE_TRANSLATION_KEY_SET_SETTING, - SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID, - SVE_TRANSLATION_PLACEHOLDER_KEY, - SVE_TRANSLATION_PLACEHOLDER_PROGRAM, - SVE_TRANSLATION_PLACEHOLDER_VALUE, TEMPERATURE_OPTIONS, TRANSLATION_KEYS_PROGRAMS_MAP, VARIO_PERFECT_OPTIONS, @@ -47,9 +42,11 @@ from .coordinator import ( HomeConnectConfigEntry, HomeConnectCoordinator, ) -from .entity import HomeConnectEntity, HomeConnectOptionEntity +from .entity import HomeConnectEntity, HomeConnectOptionEntity, constraint_fetcher from .utils import bsh_key_to_translation_key, get_dict_from_home_connect_error +_LOGGER = logging.getLogger(__name__) + PARALLEL_UPDATES = 1 FUNCTIONAL_LIGHT_COLOR_TEMPERATURE_ENUM = { @@ -404,7 +401,7 @@ class HomeConnectProgramSelectEntity(HomeConnectEntity, SelectEntity): translation_key=self.entity_description.error_translation_key, translation_placeholders={ **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_PROGRAM: program_key.value, + "program": program_key.value, }, ) from err @@ -413,6 +410,7 @@ class HomeConnectSelectEntity(HomeConnectEntity, SelectEntity): """Select setting class for Home Connect.""" entity_description: HomeConnectSelectEntityDescription + _original_option_keys: set[str | None] def __init__( self, @@ -421,6 +419,7 @@ class HomeConnectSelectEntity(HomeConnectEntity, SelectEntity): desc: HomeConnectSelectEntityDescription, ) -> None: """Initialize the entity.""" + self._original_option_keys = set(desc.values_translation_key) super().__init__( coordinator, appliance, @@ -439,12 +438,12 @@ class HomeConnectSelectEntity(HomeConnectEntity, SelectEntity): except HomeConnectError as err: raise HomeAssistantError( translation_domain=DOMAIN, - translation_key=SVE_TRANSLATION_KEY_SET_SETTING, + translation_key="set_setting_entity", translation_placeholders={ **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: 
self.entity_id, - SVE_TRANSLATION_PLACEHOLDER_KEY: self.bsh_key, - SVE_TRANSLATION_PLACEHOLDER_VALUE: value, + "entity_id": self.entity_id, + "key": self.bsh_key, + "value": value, }, ) from err @@ -458,23 +457,29 @@ class HomeConnectSelectEntity(HomeConnectEntity, SelectEntity): async def async_added_to_hass(self) -> None: """When entity is added to hass.""" await super().async_added_to_hass() + await self.async_fetch_options() + + @constraint_fetcher + async def async_fetch_options(self) -> None: + """Fetch options from the API.""" setting = self.appliance.settings.get(cast(SettingKey, self.bsh_key)) if ( not setting or not setting.constraints or not setting.constraints.allowed_values ): - with contextlib.suppress(HomeConnectError): - setting = await self.coordinator.client.get_setting( - self.appliance.info.ha_id, - setting_key=cast(SettingKey, self.bsh_key), - ) + setting = await self.coordinator.client.get_setting( + self.appliance.info.ha_id, + setting_key=cast(SettingKey, self.bsh_key), + ) if setting and setting.constraints and setting.constraints.allowed_values: + self._original_option_keys = set(setting.constraints.allowed_values) self._attr_options = [ self.entity_description.values_translation_key[option] - for option in setting.constraints.allowed_values - if option in self.entity_description.values_translation_key + for option in self._original_option_keys + if option is not None + and option in self.entity_description.values_translation_key ] @@ -491,7 +496,7 @@ class HomeConnectSelectOptionEntity(HomeConnectOptionEntity, SelectEntity): desc: HomeConnectSelectEntityDescription, ) -> None: """Initialize the entity.""" - self._original_option_keys = set(desc.values_translation_key.keys()) + self._original_option_keys = set(desc.values_translation_key) super().__init__( coordinator, appliance, @@ -524,5 +529,5 @@ class HomeConnectSelectOptionEntity(HomeConnectOptionEntity, SelectEntity): self.entity_description.values_translation_key[option] for option in self._original_option_keys if option is not None + and option in self.entity_description.values_translation_key ] - self.__dict__.pop("options", None) diff --git a/homeassistant/components/home_connect/sensor.py b/homeassistant/components/home_connect/sensor.py index c12e1b7b6e4..f3c73c8a5ff 100644 --- a/homeassistant/components/home_connect/sensor.py +++ b/homeassistant/components/home_connect/sensor.py @@ -1,12 +1,11 @@ """Provides a sensor for Home Connect.""" -import contextlib from dataclasses import dataclass from datetime import timedelta +import logging from typing import cast from aiohomeconnect.model import EventKey, StatusKey -from aiohomeconnect.model.error import HomeConnectError from homeassistant.components.sensor import ( SensorDeviceClass, @@ -28,7 +27,9 @@ from .const import ( UNIT_MAP, ) from .coordinator import HomeConnectApplianceData, HomeConnectConfigEntry -from .entity import HomeConnectEntity +from .entity import HomeConnectEntity, constraint_fetcher + +_LOGGER = logging.getLogger(__name__) PARALLEL_UPDATES = 0 @@ -52,7 +53,7 @@ BSH_PROGRAM_SENSORS = ( device_class=SensorDeviceClass.TIMESTAMP, translation_key="program_finish_time", appliance_types=( - "CoffeMaker", + "CoffeeMaker", "CookProcessor", "Dishwasher", "Dryer", @@ -194,28 +195,76 @@ SENSORS = ( EVENT_SENSORS = ( HomeConnectSensorEntityDescription( - key=EventKey.REFRIGERATION_FRIDGE_FREEZER_EVENT_DOOR_ALARM_FREEZER, + key=EventKey.BSH_COMMON_EVENT_PROGRAM_ABORTED, device_class=SensorDeviceClass.ENUM, options=EVENT_OPTIONS, 
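# Illustrative sketch (editor's addition): async_fetch_options above stores the
# raw allowed values in _original_option_keys and exposes only the ones that
# have a translation, so unknown enum members coming from the API are dropped
# instead of breaking the select entity. The helper below isolates that
# filtering; the sample keys in the trailing comment are made up.
def translated_options(
    allowed_values: set[str | None], values_translation_key: dict[str, str]
) -> list[str]:
    """Return translated labels for the values the appliance actually supports."""
    return [
        values_translation_key[value]
        for value in allowed_values
        if value is not None and value in values_translation_key
    ]


# e.g. translated_options({"RPM400", "RPM9999", None}, {"RPM400": "spin_400"})
# would return ["spin_400"].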
default_value="off", - translation_key="freezer_door_alarm", - appliance_types=("FridgeFreezer", "Freezer"), + translation_key="program_aborted", + appliance_types=("Dishwasher", "CleaningRobot", "CookProcessor"), ), HomeConnectSensorEntityDescription( - key=EventKey.REFRIGERATION_FRIDGE_FREEZER_EVENT_DOOR_ALARM_REFRIGERATOR, + key=EventKey.BSH_COMMON_EVENT_PROGRAM_FINISHED, device_class=SensorDeviceClass.ENUM, options=EVENT_OPTIONS, default_value="off", - translation_key="refrigerator_door_alarm", - appliance_types=("FridgeFreezer", "Refrigerator"), + translation_key="program_finished", + appliance_types=( + "Oven", + "Dishwasher", + "Washer", + "Dryer", + "WasherDryer", + "CleaningRobot", + "CookProcessor", + ), ), HomeConnectSensorEntityDescription( - key=EventKey.REFRIGERATION_FRIDGE_FREEZER_EVENT_TEMPERATURE_ALARM_FREEZER, + key=EventKey.BSH_COMMON_EVENT_ALARM_CLOCK_ELAPSED, device_class=SensorDeviceClass.ENUM, options=EVENT_OPTIONS, default_value="off", - translation_key="freezer_temperature_alarm", - appliance_types=("FridgeFreezer", "Freezer"), + translation_key="alarm_clock_elapsed", + appliance_types=("Oven", "Cooktop"), + ), + HomeConnectSensorEntityDescription( + key=EventKey.COOKING_OVEN_EVENT_PREHEAT_FINISHED, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="preheat_finished", + appliance_types=("Oven", "Cooktop"), + ), + HomeConnectSensorEntityDescription( + key=EventKey.COOKING_OVEN_EVENT_REGULAR_PREHEAT_FINISHED, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="regular_preheat_finished", + appliance_types=("Oven",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.LAUNDRY_CARE_DRYER_EVENT_DRYING_PROCESS_FINISHED, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="drying_process_finished", + appliance_types=("Dryer",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.DISHCARE_DISHWASHER_EVENT_SALT_NEARLY_EMPTY, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="salt_nearly_empty", + appliance_types=("Dishwasher",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.DISHCARE_DISHWASHER_EVENT_RINSE_AID_NEARLY_EMPTY, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="rinse_aid_nearly_empty", + appliance_types=("Dishwasher",), ), HomeConnectSensorEntityDescription( key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_BEAN_CONTAINER_EMPTY, @@ -242,20 +291,220 @@ EVENT_SENSORS = ( appliance_types=("CoffeeMaker",), ), HomeConnectSensorEntityDescription( - key=EventKey.DISHCARE_DISHWASHER_EVENT_SALT_NEARLY_EMPTY, + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_KEEP_MILK_TANK_COOL, device_class=SensorDeviceClass.ENUM, options=EVENT_OPTIONS, default_value="off", - translation_key="salt_nearly_empty", - appliance_types=("Dishwasher",), + translation_key="keep_milk_tank_cool", + appliance_types=("CoffeeMaker",), ), HomeConnectSensorEntityDescription( - key=EventKey.DISHCARE_DISHWASHER_EVENT_RINSE_AID_NEARLY_EMPTY, + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DESCALING_IN_20_CUPS, device_class=SensorDeviceClass.ENUM, options=EVENT_OPTIONS, default_value="off", - translation_key="rinse_aid_nearly_empty", - appliance_types=("Dishwasher",), + translation_key="descaling_in_20_cups", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + 
key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DESCALING_IN_15_CUPS, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="descaling_in_15_cups", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DESCALING_IN_10_CUPS, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="descaling_in_10_cups", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DESCALING_IN_5_CUPS, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="descaling_in_5_cups", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DEVICE_SHOULD_BE_DESCALED, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="device_should_be_descaled", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DEVICE_DESCALING_OVERDUE, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="device_descaling_overdue", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DEVICE_DESCALING_BLOCKAGE, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="device_descaling_blockage", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DEVICE_SHOULD_BE_CLEANED, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="device_should_be_cleaned", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DEVICE_CLEANING_OVERDUE, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="device_cleaning_overdue", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_CALC_N_CLEAN_IN20CUPS, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="calc_n_clean_in20cups", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_CALC_N_CLEAN_IN15CUPS, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="calc_n_clean_in15cups", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_CALC_N_CLEAN_IN10CUPS, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="calc_n_clean_in10cups", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_CALC_N_CLEAN_IN5CUPS, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="calc_n_clean_in5cups", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DEVICE_SHOULD_BE_CALC_N_CLEANED, + device_class=SensorDeviceClass.ENUM, + 
options=EVENT_OPTIONS, + default_value="off", + translation_key="device_should_be_calc_n_cleaned", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DEVICE_CALC_N_CLEAN_OVERDUE, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="device_calc_n_clean_overdue", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DEVICE_CALC_N_CLEAN_BLOCKAGE, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="device_calc_n_clean_blockage", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.REFRIGERATION_FRIDGE_FREEZER_EVENT_DOOR_ALARM_FREEZER, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="freezer_door_alarm", + appliance_types=("FridgeFreezer", "Freezer"), + ), + HomeConnectSensorEntityDescription( + key=EventKey.REFRIGERATION_FRIDGE_FREEZER_EVENT_DOOR_ALARM_REFRIGERATOR, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="refrigerator_door_alarm", + appliance_types=("FridgeFreezer", "Refrigerator"), + ), + HomeConnectSensorEntityDescription( + key=EventKey.REFRIGERATION_FRIDGE_FREEZER_EVENT_TEMPERATURE_ALARM_FREEZER, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="freezer_temperature_alarm", + appliance_types=("FridgeFreezer", "Freezer"), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_CLEANING_ROBOT_EVENT_EMPTY_DUST_BOX_AND_CLEAN_FILTER, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="empty_dust_box_and_clean_filter", + appliance_types=("CleaningRobot",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_CLEANING_ROBOT_EVENT_ROBOT_IS_STUCK, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="robot_is_stuck", + appliance_types=("CleaningRobot",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_CLEANING_ROBOT_EVENT_DOCKING_STATION_NOT_FOUND, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="docking_station_not_found", + appliance_types=("CleaningRobot",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.LAUNDRY_CARE_WASHER_EVENT_I_DOS_1_FILL_LEVEL_POOR, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="poor_i_dos_1_fill_level", + appliance_types=("Washer", "WasherDryer"), + ), + HomeConnectSensorEntityDescription( + key=EventKey.LAUNDRY_CARE_WASHER_EVENT_I_DOS_2_FILL_LEVEL_POOR, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="poor_i_dos_2_fill_level", + appliance_types=("Washer", "WasherDryer"), + ), + HomeConnectSensorEntityDescription( + key=EventKey.COOKING_COMMON_EVENT_HOOD_GREASE_FILTER_MAX_SATURATION_NEARLY_REACHED, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="grease_filter_max_saturation_nearly_reached", + appliance_types=("Hood",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.COOKING_COMMON_EVENT_HOOD_GREASE_FILTER_MAX_SATURATION_REACHED, + device_class=SensorDeviceClass.ENUM, + 
options=EVENT_OPTIONS, + default_value="off", + translation_key="grease_filter_max_saturation_reached", + appliance_types=("Hood",), ), ) @@ -335,16 +584,14 @@ class HomeConnectSensor(HomeConnectEntity, SensorEntity): else: await self.fetch_unit() + @constraint_fetcher async def fetch_unit(self) -> None: """Fetch the unit of measurement.""" - with contextlib.suppress(HomeConnectError): - data = await self.coordinator.client.get_status_value( - self.appliance.info.ha_id, status_key=cast(StatusKey, self.bsh_key) - ) - if data.unit: - self._attr_native_unit_of_measurement = UNIT_MAP.get( - data.unit, data.unit - ) + data = await self.coordinator.client.get_status_value( + self.appliance.info.ha_id, status_key=cast(StatusKey, self.bsh_key) + ) + if data.unit: + self._attr_native_unit_of_measurement = UNIT_MAP.get(data.unit, data.unit) class HomeConnectProgramSensor(HomeConnectSensor): diff --git a/homeassistant/components/home_connect/services.py b/homeassistant/components/home_connect/services.py new file mode 100644 index 00000000000..fac1c5fe1a9 --- /dev/null +++ b/homeassistant/components/home_connect/services.py @@ -0,0 +1,572 @@ +"""Custom actions (previously known as services) for the Home Connect integration.""" + +from __future__ import annotations + +from collections.abc import Awaitable +from typing import Any, cast + +from aiohomeconnect.client import Client as HomeConnectClient +from aiohomeconnect.model import ( + ArrayOfOptions, + CommandKey, + Option, + OptionKey, + ProgramKey, + SettingKey, +) +from aiohomeconnect.model.error import HomeConnectError +import voluptuous as vol + +from homeassistant.const import ATTR_DEVICE_ID +from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import config_validation as cv, device_registry as dr +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue + +from .const import ( + AFFECTS_TO_ACTIVE_PROGRAM, + AFFECTS_TO_SELECTED_PROGRAM, + ATTR_AFFECTS_TO, + ATTR_KEY, + ATTR_PROGRAM, + ATTR_UNIT, + ATTR_VALUE, + DOMAIN, + PROGRAM_ENUM_OPTIONS, + SERVICE_OPTION_ACTIVE, + SERVICE_OPTION_SELECTED, + SERVICE_PAUSE_PROGRAM, + SERVICE_RESUME_PROGRAM, + SERVICE_SELECT_PROGRAM, + SERVICE_SET_PROGRAM_AND_OPTIONS, + SERVICE_SETTING, + SERVICE_START_PROGRAM, + TRANSLATION_KEYS_PROGRAMS_MAP, +) +from .coordinator import HomeConnectConfigEntry +from .utils import bsh_key_to_translation_key, get_dict_from_home_connect_error + +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) + + +PROGRAM_OPTIONS = { + bsh_key_to_translation_key(key): ( + key, + value, + ) + for key, value in { + OptionKey.BSH_COMMON_DURATION: int, + OptionKey.BSH_COMMON_START_IN_RELATIVE: int, + OptionKey.BSH_COMMON_FINISH_IN_RELATIVE: int, + OptionKey.CONSUMER_PRODUCTS_COFFEE_MAKER_FILL_QUANTITY: int, + OptionKey.CONSUMER_PRODUCTS_COFFEE_MAKER_MULTIPLE_BEVERAGES: bool, + OptionKey.DISHCARE_DISHWASHER_INTENSIV_ZONE: bool, + OptionKey.DISHCARE_DISHWASHER_BRILLIANCE_DRY: bool, + OptionKey.DISHCARE_DISHWASHER_VARIO_SPEED_PLUS: bool, + OptionKey.DISHCARE_DISHWASHER_SILENCE_ON_DEMAND: bool, + OptionKey.DISHCARE_DISHWASHER_HALF_LOAD: bool, + OptionKey.DISHCARE_DISHWASHER_EXTRA_DRY: bool, + OptionKey.DISHCARE_DISHWASHER_HYGIENE_PLUS: bool, + OptionKey.DISHCARE_DISHWASHER_ECO_DRY: bool, + OptionKey.DISHCARE_DISHWASHER_ZEOLITE_DRY: bool, + OptionKey.COOKING_OVEN_SETPOINT_TEMPERATURE: int, + OptionKey.COOKING_OVEN_FAST_PRE_HEAT: bool, + 
OptionKey.LAUNDRY_CARE_WASHER_I_DOS_1_ACTIVE: bool, + OptionKey.LAUNDRY_CARE_WASHER_I_DOS_2_ACTIVE: bool, + }.items() +} + + +SERVICE_SETTING_SCHEMA = vol.Schema( + { + vol.Required(ATTR_DEVICE_ID): str, + vol.Required(ATTR_KEY): vol.All( + vol.Coerce(SettingKey), + vol.NotIn([SettingKey.UNKNOWN]), + ), + vol.Required(ATTR_VALUE): vol.Any(str, int, bool), + } +) + +# DEPRECATED: Remove in 2025.9.0 +SERVICE_OPTION_SCHEMA = vol.Schema( + { + vol.Required(ATTR_DEVICE_ID): str, + vol.Required(ATTR_KEY): vol.All( + vol.Coerce(OptionKey), + vol.NotIn([OptionKey.UNKNOWN]), + ), + vol.Required(ATTR_VALUE): vol.Any(str, int, bool), + vol.Optional(ATTR_UNIT): str, + } +) + +# DEPRECATED: Remove in 2025.9.0 +SERVICE_PROGRAM_SCHEMA = vol.Any( + { + vol.Required(ATTR_DEVICE_ID): str, + vol.Required(ATTR_PROGRAM): vol.All( + vol.Coerce(ProgramKey), + vol.NotIn([ProgramKey.UNKNOWN]), + ), + vol.Required(ATTR_KEY): vol.All( + vol.Coerce(OptionKey), + vol.NotIn([OptionKey.UNKNOWN]), + ), + vol.Required(ATTR_VALUE): vol.Any(int, str), + vol.Optional(ATTR_UNIT): str, + }, + { + vol.Required(ATTR_DEVICE_ID): str, + vol.Required(ATTR_PROGRAM): vol.All( + vol.Coerce(ProgramKey), + vol.NotIn([ProgramKey.UNKNOWN]), + ), + }, +) + + +def _require_program_or_at_least_one_option(data: dict) -> dict: + if ATTR_PROGRAM not in data and not any( + option_key in data for option_key in (PROGRAM_ENUM_OPTIONS | PROGRAM_OPTIONS) + ): + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="required_program_or_one_option_at_least", + ) + return data + + +SERVICE_PROGRAM_AND_OPTIONS_SCHEMA = vol.All( + vol.Schema( + { + vol.Required(ATTR_DEVICE_ID): str, + vol.Required(ATTR_AFFECTS_TO): vol.In( + [AFFECTS_TO_ACTIVE_PROGRAM, AFFECTS_TO_SELECTED_PROGRAM] + ), + vol.Optional(ATTR_PROGRAM): vol.In(TRANSLATION_KEYS_PROGRAMS_MAP.keys()), + } + ) + .extend( + { + vol.Optional(translation_key): vol.In(allowed_values.keys()) + for translation_key, ( + key, + allowed_values, + ) in PROGRAM_ENUM_OPTIONS.items() + } + ) + .extend( + { + vol.Optional(translation_key): schema + for translation_key, (key, schema) in PROGRAM_OPTIONS.items() + } + ), + _require_program_or_at_least_one_option, +) + +SERVICE_COMMAND_SCHEMA = vol.Schema({vol.Required(ATTR_DEVICE_ID): str}) + + +async def _get_client_and_ha_id( + hass: HomeAssistant, device_id: str +) -> tuple[HomeConnectClient, str]: + device_registry = dr.async_get(hass) + device_entry = device_registry.async_get(device_id) + if device_entry is None: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="device_entry_not_found", + translation_placeholders={ + "device_id": device_id, + }, + ) + entry: HomeConnectConfigEntry | None = None + for entry_id in device_entry.config_entries: + _entry = hass.config_entries.async_get_entry(entry_id) + assert _entry + if _entry.domain == DOMAIN: + entry = cast(HomeConnectConfigEntry, _entry) + break + if entry is None: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="config_entry_not_found", + translation_placeholders={ + "device_id": device_id, + }, + ) + + ha_id = next( + ( + identifier[1] + for identifier in device_entry.identifiers + if identifier[0] == DOMAIN + ), + None, + ) + if ha_id is None: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="appliance_not_found", + translation_placeholders={ + "device_id": device_id, + }, + ) + return entry.runtime_data.client, ha_id + + +async def _async_service_program(call: ServiceCall, start: bool) -> 
None: + """Execute calls to services taking a program.""" + program = call.data[ATTR_PROGRAM] + client, ha_id = await _get_client_and_ha_id(call.hass, call.data[ATTR_DEVICE_ID]) + + option_key = call.data.get(ATTR_KEY) + options = ( + [ + Option( + option_key, + call.data[ATTR_VALUE], + unit=call.data.get(ATTR_UNIT), + ) + ] + if option_key is not None + else None + ) + + async_create_issue( + call.hass, + DOMAIN, + "deprecated_set_program_and_option_actions", + breaks_in_ha_version="2025.9.0", + is_fixable=True, + is_persistent=True, + severity=IssueSeverity.WARNING, + translation_key="deprecated_set_program_and_option_actions", + translation_placeholders={ + "new_action_key": SERVICE_SET_PROGRAM_AND_OPTIONS, + "remove_release": "2025.9.0", + "deprecated_action_yaml": "\n".join( + [ + "```yaml", + f"action: {DOMAIN}.{SERVICE_START_PROGRAM if start else SERVICE_SELECT_PROGRAM}", + "data:", + f" {ATTR_DEVICE_ID}: DEVICE_ID", + f" {ATTR_PROGRAM}: {program}", + *([f" {ATTR_KEY}: {options[0].key}"] if options else []), + *([f" {ATTR_VALUE}: {options[0].value}"] if options else []), + *( + [f" {ATTR_UNIT}: {options[0].unit}"] + if options and options[0].unit + else [] + ), + "```", + ] + ), + "new_action_yaml": "\n ".join( + [ + "```yaml", + f"action: {DOMAIN}.{SERVICE_SET_PROGRAM_AND_OPTIONS}", + "data:", + f" {ATTR_DEVICE_ID}: DEVICE_ID", + f" {ATTR_AFFECTS_TO}: {AFFECTS_TO_ACTIVE_PROGRAM if start else AFFECTS_TO_SELECTED_PROGRAM}", + f" {ATTR_PROGRAM}: {bsh_key_to_translation_key(program.value)}", + *( + [ + f" {bsh_key_to_translation_key(options[0].key)}: {options[0].value}" + ] + if options + else [] + ), + "```", + ] + ), + "repo_link": "[aiohomeconnect](https://github.com/MartinHjelmare/aiohomeconnect)", + }, + ) + + try: + if start: + await client.start_program(ha_id, program_key=program, options=options) + else: + await client.set_selected_program( + ha_id, program_key=program, options=options + ) + except HomeConnectError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="start_program" if start else "select_program", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + "program": program, + }, + ) from err + + +async def _async_service_set_program_options(call: ServiceCall, active: bool) -> None: + """Execute calls to services taking a program.""" + option_key = call.data[ATTR_KEY] + value = call.data[ATTR_VALUE] + unit = call.data.get(ATTR_UNIT) + client, ha_id = await _get_client_and_ha_id(call.hass, call.data[ATTR_DEVICE_ID]) + + async_create_issue( + call.hass, + DOMAIN, + "deprecated_set_program_and_option_actions", + breaks_in_ha_version="2025.9.0", + is_fixable=True, + is_persistent=True, + severity=IssueSeverity.WARNING, + translation_key="deprecated_set_program_and_option_actions", + translation_placeholders={ + "new_action_key": SERVICE_SET_PROGRAM_AND_OPTIONS, + "remove_release": "2025.9.0", + "deprecated_action_yaml": "\n".join( + [ + "```yaml", + f"action: {DOMAIN}.{SERVICE_OPTION_ACTIVE if active else SERVICE_OPTION_SELECTED}", + "data:", + f" {ATTR_DEVICE_ID}: DEVICE_ID", + f" {ATTR_KEY}: {option_key}", + f" {ATTR_VALUE}: {value}", + *([f" {ATTR_UNIT}: {unit}"] if unit else []), + "```", + ] + ), + "new_action_yaml": "\n ".join( + [ + "```yaml", + f"action: {DOMAIN}.{SERVICE_SET_PROGRAM_AND_OPTIONS}", + "data:", + f" {ATTR_DEVICE_ID}: DEVICE_ID", + f" {ATTR_AFFECTS_TO}: {AFFECTS_TO_ACTIVE_PROGRAM if active else AFFECTS_TO_SELECTED_PROGRAM}", + f" {bsh_key_to_translation_key(option_key)}: {value}", + "```", + ] + ), 
+ "repo_link": "[aiohomeconnect](https://github.com/MartinHjelmare/aiohomeconnect)", + }, + ) + try: + if active: + await client.set_active_program_option( + ha_id, + option_key=option_key, + value=value, + unit=unit, + ) + else: + await client.set_selected_program_option( + ha_id, + option_key=option_key, + value=value, + unit=unit, + ) + except HomeConnectError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="set_options_active_program" + if active + else "set_options_selected_program", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + "key": option_key, + "value": str(value), + }, + ) from err + + +async def _async_service_command(call: ServiceCall, command_key: CommandKey) -> None: + """Execute calls to services executing a command.""" + client, ha_id = await _get_client_and_ha_id(call.hass, call.data[ATTR_DEVICE_ID]) + + async_create_issue( + call.hass, + DOMAIN, + "deprecated_command_actions", + breaks_in_ha_version="2025.9.0", + is_fixable=True, + is_persistent=True, + severity=IssueSeverity.WARNING, + translation_key="deprecated_command_actions", + ) + + try: + await client.put_command(ha_id, command_key=command_key, value=True) + except HomeConnectError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="execute_command", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + "command": command_key.value, + }, + ) from err + + +async def async_service_option_active(call: ServiceCall) -> None: + """Service for setting an option for an active program.""" + await _async_service_set_program_options(call, True) + + +async def async_service_option_selected(call: ServiceCall) -> None: + """Service for setting an option for a selected program.""" + await _async_service_set_program_options(call, False) + + +async def async_service_setting(call: ServiceCall) -> None: + """Service for changing a setting.""" + key = call.data[ATTR_KEY] + value = call.data[ATTR_VALUE] + client, ha_id = await _get_client_and_ha_id(call.hass, call.data[ATTR_DEVICE_ID]) + + try: + await client.set_setting(ha_id, setting_key=key, value=value) + except HomeConnectError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="set_setting", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + "key": key, + "value": str(value), + }, + ) from err + + +async def async_service_pause_program(call: ServiceCall) -> None: + """Service for pausing a program.""" + await _async_service_command(call, CommandKey.BSH_COMMON_PAUSE_PROGRAM) + + +async def async_service_resume_program(call: ServiceCall) -> None: + """Service for resuming a paused program.""" + await _async_service_command(call, CommandKey.BSH_COMMON_RESUME_PROGRAM) + + +async def async_service_select_program(call: ServiceCall) -> None: + """Service for selecting a program.""" + await _async_service_program(call, False) + + +async def async_service_set_program_and_options(call: ServiceCall) -> None: + """Service for setting a program and options.""" + data = dict(call.data) + program = data.pop(ATTR_PROGRAM, None) + affects_to = data.pop(ATTR_AFFECTS_TO) + client, ha_id = await _get_client_and_ha_id(call.hass, data.pop(ATTR_DEVICE_ID)) + + options: list[Option] = [] + + for option, value in data.items(): + if option in PROGRAM_ENUM_OPTIONS: + options.append( + Option( + PROGRAM_ENUM_OPTIONS[option][0], + PROGRAM_ENUM_OPTIONS[option][1][value], + ) + ) + elif option in PROGRAM_OPTIONS: + option_key = 
PROGRAM_OPTIONS[option][0] + options.append(Option(option_key, value)) + + method_call: Awaitable[Any] + exception_translation_key: str + if program: + program = ( + program + if isinstance(program, ProgramKey) + else TRANSLATION_KEYS_PROGRAMS_MAP[program] + ) + + if affects_to == AFFECTS_TO_ACTIVE_PROGRAM: + method_call = client.start_program( + ha_id, program_key=program, options=options + ) + exception_translation_key = "start_program" + elif affects_to == AFFECTS_TO_SELECTED_PROGRAM: + method_call = client.set_selected_program( + ha_id, program_key=program, options=options + ) + exception_translation_key = "select_program" + else: + array_of_options = ArrayOfOptions(options) + if affects_to == AFFECTS_TO_ACTIVE_PROGRAM: + method_call = client.set_active_program_options( + ha_id, array_of_options=array_of_options + ) + exception_translation_key = "set_options_active_program" + else: + # affects_to is AFFECTS_TO_SELECTED_PROGRAM + method_call = client.set_selected_program_options( + ha_id, array_of_options=array_of_options + ) + exception_translation_key = "set_options_selected_program" + + try: + await method_call + except HomeConnectError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key=exception_translation_key, + translation_placeholders={ + **get_dict_from_home_connect_error(err), + **({"program": program} if program else {}), + }, + ) from err + + +async def async_service_start_program(call: ServiceCall) -> None: + """Service for starting a program.""" + await _async_service_program(call, True) + + +def register_actions(hass: HomeAssistant) -> None: + """Register custom actions.""" + + hass.services.async_register( + DOMAIN, + SERVICE_OPTION_ACTIVE, + async_service_option_active, + schema=SERVICE_OPTION_SCHEMA, + ) + hass.services.async_register( + DOMAIN, + SERVICE_OPTION_SELECTED, + async_service_option_selected, + schema=SERVICE_OPTION_SCHEMA, + ) + hass.services.async_register( + DOMAIN, SERVICE_SETTING, async_service_setting, schema=SERVICE_SETTING_SCHEMA + ) + hass.services.async_register( + DOMAIN, + SERVICE_PAUSE_PROGRAM, + async_service_pause_program, + schema=SERVICE_COMMAND_SCHEMA, + ) + hass.services.async_register( + DOMAIN, + SERVICE_RESUME_PROGRAM, + async_service_resume_program, + schema=SERVICE_COMMAND_SCHEMA, + ) + hass.services.async_register( + DOMAIN, + SERVICE_SELECT_PROGRAM, + async_service_select_program, + schema=SERVICE_PROGRAM_SCHEMA, + ) + hass.services.async_register( + DOMAIN, + SERVICE_START_PROGRAM, + async_service_start_program, + schema=SERVICE_PROGRAM_SCHEMA, + ) + hass.services.async_register( + DOMAIN, + SERVICE_SET_PROGRAM_AND_OPTIONS, + async_service_set_program_and_options, + schema=SERVICE_PROGRAM_AND_OPTIONS_SCHEMA, + ) diff --git a/homeassistant/components/home_connect/services.yaml b/homeassistant/components/home_connect/services.yaml index 91b0089d653..2b53090fd34 100644 --- a/homeassistant/components/home_connect/services.yaml +++ b/homeassistant/components/home_connect/services.yaml @@ -468,11 +468,11 @@ set_program_and_options: translation_key: venting_level options: - cooking_hood_enum_type_stage_fan_off - - cooking_hood_enum_type_stage_fan_stage01 - - cooking_hood_enum_type_stage_fan_stage02 - - cooking_hood_enum_type_stage_fan_stage03 - - cooking_hood_enum_type_stage_fan_stage04 - - cooking_hood_enum_type_stage_fan_stage05 + - cooking_hood_enum_type_stage_fan_stage_01 + - cooking_hood_enum_type_stage_fan_stage_02 + - cooking_hood_enum_type_stage_fan_stage_03 + - 
cooking_hood_enum_type_stage_fan_stage_04 + - cooking_hood_enum_type_stage_fan_stage_05 cooking_hood_option_intensive_level: example: cooking_hood_enum_type_intensive_stage_intensive_stage1 required: false @@ -528,7 +528,7 @@ set_program_and_options: collapsed: true fields: laundry_care_washer_option_temperature: - example: laundry_care_washer_enum_type_temperature_g_c40 + example: laundry_care_washer_enum_type_temperature_g_c_40 required: false selector: select: @@ -536,14 +536,14 @@ set_program_and_options: translation_key: washer_temperature options: - laundry_care_washer_enum_type_temperature_cold - - laundry_care_washer_enum_type_temperature_g_c20 - - laundry_care_washer_enum_type_temperature_g_c30 - - laundry_care_washer_enum_type_temperature_g_c40 - - laundry_care_washer_enum_type_temperature_g_c50 - - laundry_care_washer_enum_type_temperature_g_c60 - - laundry_care_washer_enum_type_temperature_g_c70 - - laundry_care_washer_enum_type_temperature_g_c80 - - laundry_care_washer_enum_type_temperature_g_c90 + - laundry_care_washer_enum_type_temperature_g_c_20 + - laundry_care_washer_enum_type_temperature_g_c_30 + - laundry_care_washer_enum_type_temperature_g_c_40 + - laundry_care_washer_enum_type_temperature_g_c_50 + - laundry_care_washer_enum_type_temperature_g_c_60 + - laundry_care_washer_enum_type_temperature_g_c_70 + - laundry_care_washer_enum_type_temperature_g_c_80 + - laundry_care_washer_enum_type_temperature_g_c_90 - laundry_care_washer_enum_type_temperature_ul_cold - laundry_care_washer_enum_type_temperature_ul_warm - laundry_care_washer_enum_type_temperature_ul_hot @@ -557,13 +557,15 @@ set_program_and_options: translation_key: spin_speed options: - laundry_care_washer_enum_type_spin_speed_off - - laundry_care_washer_enum_type_spin_speed_r_p_m400 - - laundry_care_washer_enum_type_spin_speed_r_p_m600 - - laundry_care_washer_enum_type_spin_speed_r_p_m800 - - laundry_care_washer_enum_type_spin_speed_r_p_m1000 - - laundry_care_washer_enum_type_spin_speed_r_p_m1200 - - laundry_care_washer_enum_type_spin_speed_r_p_m1400 - - laundry_care_washer_enum_type_spin_speed_r_p_m1600 + - laundry_care_washer_enum_type_spin_speed_r_p_m_400 + - laundry_care_washer_enum_type_spin_speed_r_p_m_600 + - laundry_care_washer_enum_type_spin_speed_r_p_m_700 + - laundry_care_washer_enum_type_spin_speed_r_p_m_800 + - laundry_care_washer_enum_type_spin_speed_r_p_m_900 + - laundry_care_washer_enum_type_spin_speed_r_p_m_1000 + - laundry_care_washer_enum_type_spin_speed_r_p_m_1200 + - laundry_care_washer_enum_type_spin_speed_r_p_m_1400 + - laundry_care_washer_enum_type_spin_speed_r_p_m_1600 - laundry_care_washer_enum_type_spin_speed_ul_off - laundry_care_washer_enum_type_spin_speed_ul_low - laundry_care_washer_enum_type_spin_speed_ul_medium diff --git a/homeassistant/components/home_connect/strings.json b/homeassistant/components/home_connect/strings.json index 7b06128dbe6..44a6eb17cea 100644 --- a/homeassistant/components/home_connect/strings.json +++ b/homeassistant/components/home_connect/strings.json @@ -110,6 +110,28 @@ } }, "issues": { + "deprecated_time_alarm_clock_in_automations_scripts": { + "title": "Deprecated alarm clock entity detected in some automations or scripts", + "fix_flow": { + "step": { + "confirm": { + "title": "[%key:component::home_connect::issues::deprecated_time_alarm_clock_in_automations_scripts::title%]", + "description": "The alarm clock entity `{entity_id}`, which is deprecated because it's being moved to the `number` platform, is used in the following automations or 
scripts:\n{items}\n\nPlease, fix this issue by updating your automations or scripts to use the new `number` entity." + } + } + } + }, + "deprecated_time_alarm_clock": { + "title": "Deprecated alarm clock entity", + "fix_flow": { + "step": { + "confirm": { + "title": "[%key:component::home_connect::issues::deprecated_time_alarm_clock::title%]", + "description": "The alarm clock entity `{entity_id}` is deprecated because it's being moved to the `number` platform.\n\nPlease use the new `number` entity." + } + } + } + }, "deprecated_binary_common_door_sensor": { "title": "Deprecated binary door sensor detected in some automations or scripts", "description": "The binary door sensor `{entity}`, which is deprecated, is used in the following automations or scripts:\n{items}\n\nA sensor entity with additional possible states is available and should be used going forward; Please use it on the above automations or scripts to fix this issue." @@ -417,11 +439,11 @@ "venting_level": { "options": { "cooking_hood_enum_type_stage_fan_off": "Fan off", - "cooking_hood_enum_type_stage_fan_stage01": "Fan stage 1", - "cooking_hood_enum_type_stage_fan_stage02": "Fan stage 2", - "cooking_hood_enum_type_stage_fan_stage03": "Fan stage 3", - "cooking_hood_enum_type_stage_fan_stage04": "Fan stage 4", - "cooking_hood_enum_type_stage_fan_stage05": "Fan stage 5" + "cooking_hood_enum_type_stage_fan_stage_01": "Fan stage 1", + "cooking_hood_enum_type_stage_fan_stage_02": "Fan stage 2", + "cooking_hood_enum_type_stage_fan_stage_03": "Fan stage 3", + "cooking_hood_enum_type_stage_fan_stage_04": "Fan stage 4", + "cooking_hood_enum_type_stage_fan_stage_05": "Fan stage 5" } }, "intensive_level": { @@ -441,14 +463,14 @@ "washer_temperature": { "options": { "laundry_care_washer_enum_type_temperature_cold": "Cold", - "laundry_care_washer_enum_type_temperature_g_c20": "20ºC clothes", - "laundry_care_washer_enum_type_temperature_g_c30": "30ºC clothes", - "laundry_care_washer_enum_type_temperature_g_c40": "40ºC clothes", - "laundry_care_washer_enum_type_temperature_g_c50": "50ºC clothes", - "laundry_care_washer_enum_type_temperature_g_c60": "60ºC clothes", - "laundry_care_washer_enum_type_temperature_g_c70": "70ºC clothes", - "laundry_care_washer_enum_type_temperature_g_c80": "80ºC clothes", - "laundry_care_washer_enum_type_temperature_g_c90": "90ºC clothes", + "laundry_care_washer_enum_type_temperature_g_c_20": "20ºC clothes", + "laundry_care_washer_enum_type_temperature_g_c_30": "30ºC clothes", + "laundry_care_washer_enum_type_temperature_g_c_40": "40ºC clothes", + "laundry_care_washer_enum_type_temperature_g_c_50": "50ºC clothes", + "laundry_care_washer_enum_type_temperature_g_c_60": "60ºC clothes", + "laundry_care_washer_enum_type_temperature_g_c_70": "70ºC clothes", + "laundry_care_washer_enum_type_temperature_g_c_80": "80ºC clothes", + "laundry_care_washer_enum_type_temperature_g_c_90": "90ºC clothes", "laundry_care_washer_enum_type_temperature_ul_cold": "Cold", "laundry_care_washer_enum_type_temperature_ul_warm": "Warm", "laundry_care_washer_enum_type_temperature_ul_hot": "Hot", @@ -458,13 +480,15 @@ "spin_speed": { "options": { "laundry_care_washer_enum_type_spin_speed_off": "Off", - "laundry_care_washer_enum_type_spin_speed_r_p_m400": "400 rpm", - "laundry_care_washer_enum_type_spin_speed_r_p_m600": "600 rpm", - "laundry_care_washer_enum_type_spin_speed_r_p_m800": "800 rpm", - "laundry_care_washer_enum_type_spin_speed_r_p_m1000": "1000 rpm", - "laundry_care_washer_enum_type_spin_speed_r_p_m1200": "1200 rpm", - 
"laundry_care_washer_enum_type_spin_speed_r_p_m1400": "1400 rpm", - "laundry_care_washer_enum_type_spin_speed_r_p_m1600": "1600 rpm", + "laundry_care_washer_enum_type_spin_speed_r_p_m_400": "400 rpm", + "laundry_care_washer_enum_type_spin_speed_r_p_m_600": "600 rpm", + "laundry_care_washer_enum_type_spin_speed_r_p_m_700": "700 rpm", + "laundry_care_washer_enum_type_spin_speed_r_p_m_800": "800 rpm", + "laundry_care_washer_enum_type_spin_speed_r_p_m_900": "900 rpm", + "laundry_care_washer_enum_type_spin_speed_r_p_m_1000": "1000 rpm", + "laundry_care_washer_enum_type_spin_speed_r_p_m_1200": "1200 rpm", + "laundry_care_washer_enum_type_spin_speed_r_p_m_1400": "1400 rpm", + "laundry_care_washer_enum_type_spin_speed_r_p_m_1600": "1600 rpm", "laundry_care_washer_enum_type_spin_speed_ul_off": "Off", "laundry_care_washer_enum_type_spin_speed_ul_low": "Low", "laundry_care_washer_enum_type_spin_speed_ul_medium": "Medium", @@ -809,9 +833,18 @@ "bottle_cooler_door": { "name": "Bottle cooler door" }, + "common_chiller_door": { + "name": "Common chiller door" + }, "chiller_door": { "name": "Chiller door" }, + "left_chiller_door": { + "name": "Left chiller door" + }, + "right_chiller_door": { + "name": "Right chiller door" + }, "flex_compartment_door": { "name": "Flex compartment door" }, @@ -857,6 +890,9 @@ } }, "number": { + "alarm_clock": { + "name": "Alarm clock" + }, "refrigerator_setpoint_temperature": { "name": "Refrigerator temperature" }, @@ -1382,11 +1418,11 @@ "name": "[%key:component::home_connect::services::set_program_and_options::fields::cooking_hood_option_venting_level::name%]", "state": { "cooking_hood_enum_type_stage_fan_off": "[%key:component::home_connect::selector::venting_level::options::cooking_hood_enum_type_stage_fan_off%]", - "cooking_hood_enum_type_stage_fan_stage01": "[%key:component::home_connect::selector::venting_level::options::cooking_hood_enum_type_stage_fan_stage01%]", - "cooking_hood_enum_type_stage_fan_stage02": "[%key:component::home_connect::selector::venting_level::options::cooking_hood_enum_type_stage_fan_stage02%]", - "cooking_hood_enum_type_stage_fan_stage03": "[%key:component::home_connect::selector::venting_level::options::cooking_hood_enum_type_stage_fan_stage03%]", - "cooking_hood_enum_type_stage_fan_stage04": "[%key:component::home_connect::selector::venting_level::options::cooking_hood_enum_type_stage_fan_stage04%]", - "cooking_hood_enum_type_stage_fan_stage05": "[%key:component::home_connect::selector::venting_level::options::cooking_hood_enum_type_stage_fan_stage05%]" + "cooking_hood_enum_type_stage_fan_stage_01": "[%key:component::home_connect::selector::venting_level::options::cooking_hood_enum_type_stage_fan_stage_01%]", + "cooking_hood_enum_type_stage_fan_stage_02": "[%key:component::home_connect::selector::venting_level::options::cooking_hood_enum_type_stage_fan_stage_02%]", + "cooking_hood_enum_type_stage_fan_stage_03": "[%key:component::home_connect::selector::venting_level::options::cooking_hood_enum_type_stage_fan_stage_03%]", + "cooking_hood_enum_type_stage_fan_stage_04": "[%key:component::home_connect::selector::venting_level::options::cooking_hood_enum_type_stage_fan_stage_04%]", + "cooking_hood_enum_type_stage_fan_stage_05": "[%key:component::home_connect::selector::venting_level::options::cooking_hood_enum_type_stage_fan_stage_05%]" } }, "intensive_level": { @@ -1409,14 +1445,14 @@ "name": "[%key:component::home_connect::services::set_program_and_options::fields::laundry_care_washer_option_temperature::name%]", "state": { 
"laundry_care_washer_enum_type_temperature_cold": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_cold%]", - "laundry_care_washer_enum_type_temperature_g_c20": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_g_c20%]", - "laundry_care_washer_enum_type_temperature_g_c30": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_g_c30%]", - "laundry_care_washer_enum_type_temperature_g_c40": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_g_c40%]", - "laundry_care_washer_enum_type_temperature_g_c50": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_g_c50%]", - "laundry_care_washer_enum_type_temperature_g_c60": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_g_c60%]", - "laundry_care_washer_enum_type_temperature_g_c70": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_g_c70%]", - "laundry_care_washer_enum_type_temperature_g_c80": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_g_c80%]", - "laundry_care_washer_enum_type_temperature_g_c90": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_g_c90%]", + "laundry_care_washer_enum_type_temperature_g_c_20": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_g_c_20%]", + "laundry_care_washer_enum_type_temperature_g_c_30": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_g_c_30%]", + "laundry_care_washer_enum_type_temperature_g_c_40": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_g_c_40%]", + "laundry_care_washer_enum_type_temperature_g_c_50": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_g_c_50%]", + "laundry_care_washer_enum_type_temperature_g_c_60": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_g_c_60%]", + "laundry_care_washer_enum_type_temperature_g_c_70": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_g_c_70%]", + "laundry_care_washer_enum_type_temperature_g_c_80": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_g_c_80%]", + "laundry_care_washer_enum_type_temperature_g_c_90": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_g_c_90%]", "laundry_care_washer_enum_type_temperature_ul_cold": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_ul_cold%]", "laundry_care_washer_enum_type_temperature_ul_warm": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_ul_warm%]", "laundry_care_washer_enum_type_temperature_ul_hot": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_ul_hot%]", @@ -1427,13 
+1463,15 @@ "name": "[%key:component::home_connect::services::set_program_and_options::fields::laundry_care_washer_option_spin_speed::name%]", "state": { "laundry_care_washer_enum_type_spin_speed_off": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_off%]", - "laundry_care_washer_enum_type_spin_speed_r_p_m400": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m400%]", - "laundry_care_washer_enum_type_spin_speed_r_p_m600": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m600%]", - "laundry_care_washer_enum_type_spin_speed_r_p_m800": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m800%]", - "laundry_care_washer_enum_type_spin_speed_r_p_m1000": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m1000%]", - "laundry_care_washer_enum_type_spin_speed_r_p_m1200": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m1200%]", - "laundry_care_washer_enum_type_spin_speed_r_p_m1400": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m1400%]", - "laundry_care_washer_enum_type_spin_speed_r_p_m1600": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m1600%]", + "laundry_care_washer_enum_type_spin_speed_r_p_m_400": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m_400%]", + "laundry_care_washer_enum_type_spin_speed_r_p_m_600": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m_600%]", + "laundry_care_washer_enum_type_spin_speed_r_p_m_700": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m_700%]", + "laundry_care_washer_enum_type_spin_speed_r_p_m_800": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m_800%]", + "laundry_care_washer_enum_type_spin_speed_r_p_m_900": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m_900%]", + "laundry_care_washer_enum_type_spin_speed_r_p_m_1000": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m_1000%]", + "laundry_care_washer_enum_type_spin_speed_r_p_m_1200": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m_1200%]", + "laundry_care_washer_enum_type_spin_speed_r_p_m_1400": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m_1400%]", + "laundry_care_washer_enum_type_spin_speed_r_p_m_1600": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m_1600%]", "laundry_care_washer_enum_type_spin_speed_ul_off": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_ul_off%]", "laundry_care_washer_enum_type_spin_speed_ul_low": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_ul_low%]", "laundry_care_washer_enum_type_spin_speed_ul_medium": 
"[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_ul_medium%]", @@ -1532,23 +1570,64 @@ "oven_current_cavity_temperature": { "name": "Current oven cavity temperature" }, - "freezer_door_alarm": { - "name": "Freezer door alarm", - "state": { - "confirmed": "[%key:component::home_connect::common::confirmed%]", - "present": "[%key:component::home_connect::common::present%]" - } - }, - "refrigerator_door_alarm": { - "name": "Refrigerator door alarm", + "program_aborted": { + "name": "Program aborted", "state": { "off": "[%key:common::state::off%]", "confirmed": "[%key:component::home_connect::common::confirmed%]", "present": "[%key:component::home_connect::common::present%]" } }, - "freezer_temperature_alarm": { - "name": "Freezer temperature alarm", + "program_finished": { + "name": "Program finished", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "alarm_clock_elapsed": { + "name": "Alarm clock elapsed", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "preheat_finished": { + "name": "Pre-heat finished", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "regular_preheat_finished": { + "name": "Regular pre-heat finished", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "drying_process_finished": { + "name": "Drying process finished", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "salt_nearly_empty": { + "name": "Salt nearly empty", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "rinse_aid_nearly_empty": { + "name": "Rinse aid nearly empty", "state": { "off": "[%key:common::state::off%]", "confirmed": "[%key:component::home_connect::common::confirmed%]", @@ -1579,16 +1658,216 @@ "present": "[%key:component::home_connect::common::present%]" } }, - "salt_nearly_empty": { - "name": "Salt nearly empty", + "keep_milk_tank_cool": { + "name": "Keep milk tank cool", "state": { "off": "[%key:common::state::off%]", "confirmed": "[%key:component::home_connect::common::confirmed%]", "present": "[%key:component::home_connect::common::present%]" } }, - "rinse_aid_nearly_empty": { - "name": "Rinse aid nearly empty", + "descaling_in_20_cups": { + "name": "Descaling in 20 cups", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "descaling_in_15_cups": { + "name": "Descaling in 15 cups", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "descaling_in_10_cups": { + "name": "Descaling in 10 cups", + 
"state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "descaling_in_5_cups": { + "name": "Descaling in 5 cups", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "device_should_be_descaled": { + "name": "Device should be descaled", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "device_descaling_overdue": { + "name": "Device descaling overdue", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "device_descaling_blockage": { + "name": "Device descaling blockage", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "device_should_be_cleaned": { + "name": "Device should be cleaned", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "device_cleaning_overdue": { + "name": "Device cleaning overdue", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "calc_n_clean_in20cups": { + "name": "Calc'N'Clean in 20 cups", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "calc_n_clean_in15cups": { + "name": "Calc'N'Clean in 15 cups", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "calc_n_clean_in10cups": { + "name": "Calc'N'Clean in 10 cups", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "calc_n_clean_in5cups": { + "name": "Calc'N'Clean in 5 cups", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "device_should_be_calc_n_cleaned": { + "name": "Device should be Calc'N'Cleaned", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "device_calc_n_clean_overdue": { + "name": "Device Calc'N'Clean overdue", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "device_calc_n_clean_blockage": { + "name": "Device Calc'N'Clean blockage", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + 
"present": "[%key:component::home_connect::common::present%]" + } + }, + "freezer_door_alarm": { + "name": "Freezer door alarm", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "refrigerator_door_alarm": { + "name": "Refrigerator door alarm", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "freezer_temperature_alarm": { + "name": "Freezer temperature alarm", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "empty_dust_box_and_clean_filter": { + "name": "Empty dust box and clean filter", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "robot_is_stuck": { + "name": "Robot is stuck", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "docking_station_not_found": { + "name": "Docking station not found", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "poor_i_dos_1_fill_level": { + "name": "Poor i-Dos 1 fill level", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "poor_i_dos_2_fill_level": { + "name": "Poor i-Dos 2 fill level", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "grease_filter_max_saturation_nearly_reached": { + "name": "Grease filter max saturation nearly reached", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "grease_filter_max_saturation_reached": { + "name": "Grease filter max saturation reached", "state": { "off": "[%key:common::state::off%]", "confirmed": "[%key:component::home_connect::common::confirmed%]", diff --git a/homeassistant/components/home_connect/switch.py b/homeassistant/components/home_connect/switch.py index 6f9aa0e679f..33e30f184b7 100644 --- a/homeassistant/components/home_connect/switch.py +++ b/homeassistant/components/home_connect/switch.py @@ -22,16 +22,7 @@ from homeassistant.helpers.issue_registry import ( from homeassistant.helpers.typing import UNDEFINED, UndefinedType from .common import setup_home_connect_entry -from .const import ( - BSH_POWER_OFF, - BSH_POWER_ON, - BSH_POWER_STANDBY, - DOMAIN, - SVE_TRANSLATION_PLACEHOLDER_APPLIANCE_NAME, - SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID, - SVE_TRANSLATION_PLACEHOLDER_KEY, - SVE_TRANSLATION_PLACEHOLDER_VALUE, -) +from .const import BSH_POWER_OFF, BSH_POWER_ON, BSH_POWER_STANDBY, DOMAIN from .coordinator import ( HomeConnectApplianceData, HomeConnectConfigEntry, @@ -226,8 +217,8 @@ class 
HomeConnectSwitch(HomeConnectEntity, SwitchEntity): translation_key="turn_on", translation_placeholders={ **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, - SVE_TRANSLATION_PLACEHOLDER_KEY: self.bsh_key, + "entity_id": self.entity_id, + "key": self.bsh_key, }, ) from err @@ -246,8 +237,8 @@ class HomeConnectSwitch(HomeConnectEntity, SwitchEntity): translation_key="turn_off", translation_placeholders={ **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, - SVE_TRANSLATION_PLACEHOLDER_KEY: self.bsh_key, + "entity_id": self.entity_id, + "key": self.bsh_key, }, ) from err @@ -385,7 +376,7 @@ class HomeConnectPowerSwitch(HomeConnectEntity, SwitchEntity): translation_key="power_on", translation_placeholders={ **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_APPLIANCE_NAME: self.appliance.info.name, + "appliance_name": self.appliance.info.name, }, ) from err @@ -398,7 +389,7 @@ class HomeConnectPowerSwitch(HomeConnectEntity, SwitchEntity): translation_domain=DOMAIN, translation_key="unable_to_retrieve_turn_off", translation_placeholders={ - SVE_TRANSLATION_PLACEHOLDER_APPLIANCE_NAME: self.appliance.info.name + "appliance_name": self.appliance.info.name }, ) @@ -406,9 +397,7 @@ class HomeConnectPowerSwitch(HomeConnectEntity, SwitchEntity): raise HomeAssistantError( translation_domain=DOMAIN, translation_key="turn_off_not_supported", - translation_placeholders={ - SVE_TRANSLATION_PLACEHOLDER_APPLIANCE_NAME: self.appliance.info.name - }, + translation_placeholders={"appliance_name": self.appliance.info.name}, ) try: await self.coordinator.client.set_setting( @@ -423,8 +412,8 @@ class HomeConnectPowerSwitch(HomeConnectEntity, SwitchEntity): translation_key="power_off", translation_placeholders={ **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_APPLIANCE_NAME: self.appliance.info.name, - SVE_TRANSLATION_PLACEHOLDER_VALUE: self.power_off_state, + "appliance_name": self.appliance.info.name, + "value": self.power_off_state, }, ) from err diff --git a/homeassistant/components/home_connect/time.py b/homeassistant/components/home_connect/time.py index a1761219d30..adf26d2d973 100644 --- a/homeassistant/components/home_connect/time.py +++ b/homeassistant/components/home_connect/time.py @@ -1,4 +1,4 @@ -"""Provides time enties for Home Connect.""" +"""Provides time entities for Home Connect.""" from datetime import time from typing import cast @@ -6,19 +6,21 @@ from typing import cast from aiohomeconnect.model import SettingKey from aiohomeconnect.model.error import HomeConnectError +from homeassistant.components.automation import automations_with_entity +from homeassistant.components.script import scripts_with_entity from homeassistant.components.time import TimeEntity, TimeEntityDescription from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback +from homeassistant.helpers.issue_registry import ( + IssueSeverity, + async_create_issue, + async_delete_issue, +) from .common import setup_home_connect_entry -from .const import ( - DOMAIN, - SVE_TRANSLATION_KEY_SET_SETTING, - SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID, - SVE_TRANSLATION_PLACEHOLDER_KEY, - SVE_TRANSLATION_PLACEHOLDER_VALUE, -) +from .const import DOMAIN from .coordinator import HomeConnectApplianceData, HomeConnectConfigEntry from 
.entity import HomeConnectEntity from .utils import get_dict_from_home_connect_error @@ -29,6 +31,7 @@ TIME_ENTITIES = ( TimeEntityDescription( key=SettingKey.BSH_COMMON_ALARM_CLOCK, translation_key="alarm_clock", + entity_registry_enabled_default=False, ), ) @@ -73,8 +76,78 @@ def time_to_seconds(t: time) -> int: class HomeConnectTimeEntity(HomeConnectEntity, TimeEntity): """Time setting class for Home Connect.""" + async def async_added_to_hass(self) -> None: + """Call when entity is added to hass.""" + await super().async_added_to_hass() + if self.bsh_key == SettingKey.BSH_COMMON_ALARM_CLOCK: + automations = automations_with_entity(self.hass, self.entity_id) + scripts = scripts_with_entity(self.hass, self.entity_id) + items = automations + scripts + if not items: + return + + entity_reg: er.EntityRegistry = er.async_get(self.hass) + entity_automations = [ + automation_entity + for automation_id in automations + if (automation_entity := entity_reg.async_get(automation_id)) + ] + entity_scripts = [ + script_entity + for script_id in scripts + if (script_entity := entity_reg.async_get(script_id)) + ] + + items_list = [ + f"- [{item.original_name}](/config/automation/edit/{item.unique_id})" + for item in entity_automations + ] + [ + f"- [{item.original_name}](/config/script/edit/{item.unique_id})" + for item in entity_scripts + ] + + async_create_issue( + self.hass, + DOMAIN, + f"deprecated_time_alarm_clock_in_automations_scripts_{self.entity_id}", + breaks_in_ha_version="2025.10.0", + is_fixable=True, + is_persistent=True, + severity=IssueSeverity.WARNING, + translation_key="deprecated_time_alarm_clock", + translation_placeholders={ + "entity_id": self.entity_id, + "items": "\n".join(items_list), + }, + ) + + async def async_will_remove_from_hass(self) -> None: + """Call when entity will be removed from hass.""" + if self.bsh_key == SettingKey.BSH_COMMON_ALARM_CLOCK: + async_delete_issue( + self.hass, + DOMAIN, + f"deprecated_time_alarm_clock_in_automations_scripts_{self.entity_id}", + ) + async_delete_issue( + self.hass, DOMAIN, f"deprecated_time_alarm_clock_{self.entity_id}" + ) + async def async_set_value(self, value: time) -> None: """Set the native value of the entity.""" + async_create_issue( + self.hass, + DOMAIN, + f"deprecated_time_alarm_clock_{self.entity_id}", + breaks_in_ha_version="2025.10.0", + is_fixable=True, + is_persistent=True, + severity=IssueSeverity.WARNING, + translation_key="deprecated_time_alarm_clock", + translation_placeholders={ + "entity_id": self.entity_id, + }, + ) try: await self.coordinator.client.set_setting( self.appliance.info.ha_id, @@ -84,12 +157,12 @@ class HomeConnectTimeEntity(HomeConnectEntity, TimeEntity): except HomeConnectError as err: raise HomeAssistantError( translation_domain=DOMAIN, - translation_key=SVE_TRANSLATION_KEY_SET_SETTING, + translation_key="set_setting_entity", translation_placeholders={ **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, - SVE_TRANSLATION_PLACEHOLDER_KEY: self.bsh_key, - SVE_TRANSLATION_PLACEHOLDER_VALUE: str(value), + "entity_id": self.entity_id, + "key": self.bsh_key, + "value": str(value), }, ) from err diff --git a/homeassistant/components/homeassistant/strings.json b/homeassistant/components/homeassistant/strings.json index 4ca56471452..b8b5f77cf52 100644 --- a/homeassistant/components/homeassistant/strings.json +++ b/homeassistant/components/homeassistant/strings.json @@ -188,7 +188,7 @@ }, "reload_all": { "name": "Reload all", - "description": "Reload all YAML 
configuration that can be reloaded without restarting Home Assistant." + "description": "Reloads all YAML configuration that can be reloaded without restarting Home Assistant." } }, "exceptions": { diff --git a/homeassistant/components/homeassistant_hardware/coordinator.py b/homeassistant/components/homeassistant_hardware/coordinator.py new file mode 100644 index 00000000000..9eb900b13fd --- /dev/null +++ b/homeassistant/components/homeassistant_hardware/coordinator.py @@ -0,0 +1,47 @@ +"""Home Assistant hardware firmware update coordinator.""" + +from __future__ import annotations + +from datetime import timedelta +import logging + +from aiohttp import ClientSession +from ha_silabs_firmware_client import ( + FirmwareManifest, + FirmwareUpdateClient, + ManifestMissing, +) + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +_LOGGER = logging.getLogger(__name__) + + +FIRMWARE_REFRESH_INTERVAL = timedelta(hours=8) + + +class FirmwareUpdateCoordinator(DataUpdateCoordinator[FirmwareManifest]): + """Coordinator to manage firmware updates.""" + + def __init__(self, hass: HomeAssistant, session: ClientSession, url: str) -> None: + """Initialize the firmware update coordinator.""" + super().__init__( + hass, + _LOGGER, + name="firmware update coordinator", + update_interval=FIRMWARE_REFRESH_INTERVAL, + always_update=False, + ) + self.hass = hass + self.session = session + + self.client = FirmwareUpdateClient(url, session) + + async def _async_update_data(self) -> FirmwareManifest: + try: + return await self.client.async_update_data() + except ManifestMissing as err: + raise UpdateFailed( + "GitHub release assets haven't been uploaded yet" + ) from err diff --git a/homeassistant/components/homeassistant_hardware/manifest.json b/homeassistant/components/homeassistant_hardware/manifest.json index 8f59ab61600..f3a02185b83 100644 --- a/homeassistant/components/homeassistant_hardware/manifest.json +++ b/homeassistant/components/homeassistant_hardware/manifest.json @@ -5,5 +5,8 @@ "codeowners": ["@home-assistant/core"], "documentation": "https://www.home-assistant.io/integrations/homeassistant_hardware", "integration_type": "system", - "requirements": ["universal-silabs-flasher==0.0.29"] + "requirements": [ + "universal-silabs-flasher==0.0.30", + "ha-silabs-firmware-client==0.2.0" + ] } diff --git a/homeassistant/components/homeassistant_hardware/strings.json b/homeassistant/components/homeassistant_hardware/strings.json index 5456f418c75..6dda01561f1 100644 --- a/homeassistant/components/homeassistant_hardware/strings.json +++ b/homeassistant/components/homeassistant_hardware/strings.json @@ -39,7 +39,7 @@ "description": "The OpenThread Border Router (OTBR) add-on is now starting." }, "otbr_failed": { - "title": "Failed to setup OpenThread Border Router", + "title": "Failed to set up OpenThread Border Router", "description": "The OpenThread Border Router add-on installation was unsuccessful. Ensure no other software is trying to communicate with the {model}, you have access to the Internet and can install other add-ons, and try again. Check the Supervisor logs if the problem persists." 
}, "confirm_otbr": { diff --git a/homeassistant/components/homeassistant_hardware/update.py b/homeassistant/components/homeassistant_hardware/update.py new file mode 100644 index 00000000000..e835286238f --- /dev/null +++ b/homeassistant/components/homeassistant_hardware/update.py @@ -0,0 +1,331 @@ +"""Home Assistant Hardware base firmware update entity.""" + +from __future__ import annotations + +from collections.abc import AsyncIterator, Callable +from contextlib import AsyncExitStack, asynccontextmanager +from dataclasses import dataclass +import logging +from typing import Any, cast + +from ha_silabs_firmware_client import FirmwareManifest, FirmwareMetadata +from universal_silabs_flasher.firmware import parse_firmware_image +from universal_silabs_flasher.flasher import Flasher +from yarl import URL + +from homeassistant.components.update import ( + UpdateEntity, + UpdateEntityDescription, + UpdateEntityFeature, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import CALLBACK_TYPE, callback +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.restore_state import ExtraStoredData +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .coordinator import FirmwareUpdateCoordinator +from .helpers import async_register_firmware_info_callback +from .util import ( + ApplicationType, + FirmwareInfo, + guess_firmware_info, + probe_silabs_firmware_info, +) + +_LOGGER = logging.getLogger(__name__) + +type FirmwareChangeCallbackType = Callable[ + [ApplicationType | None, ApplicationType | None], None +] + + +@dataclass(kw_only=True, frozen=True) +class FirmwareUpdateEntityDescription(UpdateEntityDescription): + """Describes Home Assistant Hardware firmware update entity.""" + + version_parser: Callable[[str], str] + fw_type: str | None + version_key: str | None + expected_firmware_type: ApplicationType | None + firmware_name: str | None + + +@dataclass +class FirmwareUpdateExtraStoredData(ExtraStoredData): + """Extra stored data for Home Assistant Hardware firmware update entity.""" + + firmware_manifest: FirmwareManifest | None = None + + def as_dict(self) -> dict[str, Any]: + """Return a dict representation of the extra data.""" + return { + "firmware_manifest": ( + self.firmware_manifest.as_dict() + if self.firmware_manifest is not None + else None + ) + } + + @classmethod + def from_dict(cls, data: dict[str, Any]) -> FirmwareUpdateExtraStoredData: + """Initialize the extra data from a dict.""" + if data["firmware_manifest"] is None: + return cls(firmware_manifest=None) + + return cls( + FirmwareManifest.from_json( + data["firmware_manifest"], + # This data is not technically part of the manifest and is loaded externally + url=URL(data["firmware_manifest"]["url"]), + html_url=URL(data["firmware_manifest"]["html_url"]), + ) + ) + + +class BaseFirmwareUpdateEntity( + CoordinatorEntity[FirmwareUpdateCoordinator], UpdateEntity +): + """Base Home Assistant Hardware firmware update entity.""" + + # Subclasses provide the mapping between firmware types and entity descriptions + entity_description: FirmwareUpdateEntityDescription + bootloader_reset_type: str | None = None + + _attr_supported_features = ( + UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS + ) + # Until this entity can be associated with a device, we must manually name it + _attr_has_entity_name = False + + def __init__( + self, + device: str, + config_entry: ConfigEntry, + update_coordinator: FirmwareUpdateCoordinator, + 
entity_description: FirmwareUpdateEntityDescription, + ) -> None: + """Initialize the Hardware firmware update entity.""" + super().__init__(update_coordinator) + + self.entity_description = entity_description + self._current_device = device + self._config_entry = config_entry + self._current_firmware_info: FirmwareInfo | None = None + self._firmware_type_change_callbacks: set[FirmwareChangeCallbackType] = set() + + self._latest_manifest: FirmwareManifest | None = None + self._latest_firmware: FirmwareMetadata | None = None + + def add_firmware_type_changed_callback( + self, + change_callback: FirmwareChangeCallbackType, + ) -> CALLBACK_TYPE: + """Add a callback for when the firmware type changes.""" + self._firmware_type_change_callbacks.add(change_callback) + + @callback + def remove_callback() -> None: + self._firmware_type_change_callbacks.discard(change_callback) + + return remove_callback + + async def async_added_to_hass(self) -> None: + """Handle entity which will be added.""" + await super().async_added_to_hass() + + self.async_on_remove( + async_register_firmware_info_callback( + self.hass, + self._current_device, + self._firmware_info_callback, + ) + ) + + self.async_on_remove( + self._config_entry.async_on_state_change(self._on_config_entry_change) + ) + + if (extra_data := await self.async_get_last_extra_data()) and ( + hardware_extra_data := FirmwareUpdateExtraStoredData.from_dict( + extra_data.as_dict() + ) + ): + self._latest_manifest = hardware_extra_data.firmware_manifest + + self._update_attributes() + + @property + def extra_restore_state_data(self) -> FirmwareUpdateExtraStoredData: + """Return state data to be restored.""" + return FirmwareUpdateExtraStoredData(firmware_manifest=self._latest_manifest) + + @callback + def _on_config_entry_change(self) -> None: + """Handle config entry changes.""" + self._update_attributes() + self.async_write_ha_state() + + @callback + def _firmware_info_callback(self, firmware_info: FirmwareInfo) -> None: + """Handle updated firmware info being pushed by an integration.""" + self._current_firmware_info = firmware_info + + # If the firmware type does not change, we can just update the attributes + if ( + self._current_firmware_info.firmware_type + == self.entity_description.expected_firmware_type + ): + self._update_attributes() + self.async_write_ha_state() + return + + # Otherwise, fire the firmware type change callbacks. They are expected to + # replace the entity so there is no purpose in firing other callbacks. 
+ for change_callback in self._firmware_type_change_callbacks.copy(): + try: + change_callback( + self.entity_description.expected_firmware_type, + self._current_firmware_info.firmware_type, + ) + except Exception: # noqa: BLE001 + _LOGGER.warning( + "Failed to call firmware type changed callback", exc_info=True + ) + + def _update_attributes(self) -> None: + """Recompute the attributes of the entity.""" + + # This entity is not currently associated with a device so we must manually + # give it a name + self._attr_name = f"{self._config_entry.title} Update" + self._attr_title = self.entity_description.firmware_name or "unknown" + + if ( + self._current_firmware_info is None + or self._current_firmware_info.firmware_version is None + ): + self._attr_installed_version = None + else: + self._attr_installed_version = self.entity_description.version_parser( + self._current_firmware_info.firmware_version + ) + + self._latest_firmware = None + self._attr_latest_version = None + self._attr_release_summary = None + self._attr_release_url = None + + if ( + self._latest_manifest is None + or self.entity_description.fw_type is None + or self.entity_description.version_key is None + ): + return + + try: + self._latest_firmware = next( + f + for f in self._latest_manifest.firmwares + if f.filename.startswith(self.entity_description.fw_type) + ) + except StopIteration: + pass + else: + version = cast( + str, self._latest_firmware.metadata[self.entity_description.version_key] + ) + self._attr_latest_version = self.entity_description.version_parser(version) + self._attr_release_summary = self._latest_firmware.release_notes + self._attr_release_url = str(self._latest_manifest.html_url) + + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + self._latest_manifest = self.coordinator.data + self._update_attributes() + self.async_write_ha_state() + + def _update_progress(self, offset: int, total_size: int) -> None: + """Handle update progress.""" + + # Firmware updates in ~30s so we still get responsive update progress even + # without decimal places + self._attr_update_percentage = round((offset * 100) / total_size) + self.async_write_ha_state() + + @asynccontextmanager + async def _temporarily_stop_hardware_owners( + self, device: str + ) -> AsyncIterator[None]: + """Temporarily stop addons and integrations communicating with the device.""" + firmware_info = await guess_firmware_info(self.hass, device) + _LOGGER.debug("Identified firmware info: %s", firmware_info) + + async with AsyncExitStack() as stack: + for owner in firmware_info.owners: + await stack.enter_async_context(owner.temporarily_stop(self.hass)) + + yield + + async def async_install( + self, version: str | None, backup: bool, **kwargs: Any + ) -> None: + """Install an update.""" + assert self._latest_firmware is not None + assert self.entity_description.expected_firmware_type is not None + + # Start off by setting the progress bar to an indeterminate state + self._attr_in_progress = True + self._attr_update_percentage = None + self.async_write_ha_state() + + fw_data = await self.coordinator.client.async_fetch_firmware( + self._latest_firmware + ) + fw_image = await self.hass.async_add_executor_job(parse_firmware_image, fw_data) + + device = self._current_device + + flasher = Flasher( + device=device, + probe_methods=( + ApplicationType.GECKO_BOOTLOADER.as_flasher_application_type(), + ApplicationType.EZSP.as_flasher_application_type(), + 
ApplicationType.SPINEL.as_flasher_application_type(), + ApplicationType.CPC.as_flasher_application_type(), + ), + bootloader_reset=self.bootloader_reset_type, + ) + + async with self._temporarily_stop_hardware_owners(device): + try: + try: + # Enter the bootloader with indeterminate progress + await flasher.enter_bootloader() + + # Flash the firmware, with progress + await flasher.flash_firmware( + fw_image, progress_callback=self._update_progress + ) + except Exception as err: + raise HomeAssistantError("Failed to flash firmware") from err + + # Probe the running application type with indeterminate progress + self._attr_update_percentage = None + self.async_write_ha_state() + + firmware_info = await probe_silabs_firmware_info( + device, + probe_methods=(self.entity_description.expected_firmware_type,), + ) + + if firmware_info is None: + raise HomeAssistantError( + "Failed to probe the firmware after flashing" + ) + + self._firmware_info_callback(firmware_info) + finally: + self._attr_in_progress = False + self.async_write_ha_state() diff --git a/homeassistant/components/homeassistant_hardware/util.py b/homeassistant/components/homeassistant_hardware/util.py index 1afb786369e..64f363e4f23 100644 --- a/homeassistant/components/homeassistant_hardware/util.py +++ b/homeassistant/components/homeassistant_hardware/util.py @@ -4,7 +4,8 @@ from __future__ import annotations import asyncio from collections import defaultdict -from collections.abc import Iterable +from collections.abc import AsyncIterator, Iterable +from contextlib import asynccontextmanager from dataclasses import dataclass from enum import StrEnum import logging @@ -105,6 +106,28 @@ class OwningAddon: else: return addon_info.state == AddonState.RUNNING + @asynccontextmanager + async def temporarily_stop(self, hass: HomeAssistant) -> AsyncIterator[None]: + """Temporarily stop the add-on, restarting it after completion.""" + addon_manager = self._get_addon_manager(hass) + + try: + addon_info = await addon_manager.async_get_addon_info() + except AddonError: + yield + return + + if addon_info.state != AddonState.RUNNING: + yield + return + + try: + await addon_manager.async_stop_addon() + await addon_manager.async_wait_until_addon_state(AddonState.NOT_RUNNING) + yield + finally: + await addon_manager.async_start_addon_waiting() + @dataclass(kw_only=True) class OwningIntegration: @@ -123,6 +146,23 @@ class OwningIntegration: ConfigEntryState.SETUP_IN_PROGRESS, ) + @asynccontextmanager + async def temporarily_stop(self, hass: HomeAssistant) -> AsyncIterator[None]: + """Temporarily stop the integration, restarting it after completion.""" + if (entry := hass.config_entries.async_get_entry(self.config_entry_id)) is None: + yield + return + + if entry.state != ConfigEntryState.LOADED: + yield + return + + try: + await hass.config_entries.async_unload(entry.entry_id) + yield + finally: + await hass.config_entries.async_setup(entry.entry_id) + @dataclass(kw_only=True) class FirmwareInfo: diff --git a/homeassistant/components/homeassistant_sky_connect/__init__.py b/homeassistant/components/homeassistant_sky_connect/__init__.py index 758f0c1e1ef..b3af47df61d 100644 --- a/homeassistant/components/homeassistant_sky_connect/__init__.py +++ b/homeassistant/components/homeassistant_sky_connect/__init__.py @@ -8,11 +8,16 @@ from homeassistant.components.homeassistant_hardware.util import guess_firmware_ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant +from .const import DESCRIPTION, DEVICE, 
FIRMWARE, FIRMWARE_VERSION, PRODUCT + _LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a Home Assistant SkyConnect config entry.""" + + await hass.config_entries.async_forward_entry_setups(entry, ["update"]) + return True @@ -33,15 +38,13 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> # Add-on startup with type service get started before Core, always (e.g. the # Multi-Protocol add-on). Probing the firmware would interfere with the add-on, # so we can't safely probe here. Instead, we must make an educated guess! - firmware_guess = await guess_firmware_info( - hass, config_entry.data["device"] - ) + firmware_guess = await guess_firmware_info(hass, config_entry.data[DEVICE]) new_data = {**config_entry.data} - new_data["firmware"] = firmware_guess.firmware_type.value + new_data[FIRMWARE] = firmware_guess.firmware_type.value # Copy `description` to `product` - new_data["product"] = new_data["description"] + new_data[PRODUCT] = new_data[DESCRIPTION] hass.config_entries.async_update_entry( config_entry, @@ -50,6 +53,18 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> minor_version=2, ) + if config_entry.minor_version == 2: + # Add a `firmware_version` key + hass.config_entries.async_update_entry( + config_entry, + data={ + **config_entry.data, + FIRMWARE_VERSION: None, + }, + version=1, + minor_version=3, + ) + _LOGGER.debug( "Migration to version %s.%s successful", config_entry.version, diff --git a/homeassistant/components/homeassistant_sky_connect/config_flow.py b/homeassistant/components/homeassistant_sky_connect/config_flow.py index d8446c2d3f9..d28d74a681c 100644 --- a/homeassistant/components/homeassistant_sky_connect/config_flow.py +++ b/homeassistant/components/homeassistant_sky_connect/config_flow.py @@ -24,7 +24,20 @@ from homeassistant.config_entries import ( from homeassistant.core import callback from homeassistant.helpers.service_info.usb import UsbServiceInfo -from .const import DOCS_WEB_FLASHER_URL, DOMAIN, HardwareVariant +from .const import ( + DESCRIPTION, + DEVICE, + DOCS_WEB_FLASHER_URL, + DOMAIN, + FIRMWARE, + FIRMWARE_VERSION, + MANUFACTURER, + PID, + PRODUCT, + SERIAL_NUMBER, + VID, + HardwareVariant, +) from .util import get_hardware_variant, get_usb_service_info _LOGGER = logging.getLogger(__name__) @@ -37,6 +50,7 @@ if TYPE_CHECKING: def _get_translation_placeholders(self) -> dict[str, str]: return {} + else: # Multiple inheritance with `Protocol` seems to break TranslationPlaceholderProtocol = object @@ -67,7 +81,7 @@ class HomeAssistantSkyConnectConfigFlow( """Handle a config flow for Home Assistant SkyConnect.""" VERSION = 1 - MINOR_VERSION = 2 + MINOR_VERSION = 3 def __init__(self, *args: Any, **kwargs: Any) -> None: """Initialize the config flow.""" @@ -82,7 +96,7 @@ class HomeAssistantSkyConnectConfigFlow( config_entry: ConfigEntry, ) -> OptionsFlow: """Return the options flow.""" - firmware_type = ApplicationType(config_entry.data["firmware"]) + firmware_type = ApplicationType(config_entry.data[FIRMWARE]) if firmware_type is ApplicationType.CPC: return HomeAssistantSkyConnectMultiPanOptionsFlowHandler(config_entry) @@ -100,7 +114,7 @@ class HomeAssistantSkyConnectConfigFlow( unique_id = f"{vid}:{pid}_{serial_number}_{manufacturer}_{description}" if await self.async_set_unique_id(unique_id): - self._abort_if_unique_id_configured(updates={"device": device}) + self._abort_if_unique_id_configured(updates={DEVICE: 
device}) discovery_info.device = await self.hass.async_add_executor_job( usb.get_serial_by_id, discovery_info.device @@ -126,14 +140,15 @@ class HomeAssistantSkyConnectConfigFlow( return self.async_create_entry( title=self._hw_variant.full_name, data={ - "vid": self._usb_info.vid, - "pid": self._usb_info.pid, - "serial_number": self._usb_info.serial_number, - "manufacturer": self._usb_info.manufacturer, - "description": self._usb_info.description, # For backwards compatibility - "product": self._usb_info.description, - "device": self._usb_info.device, - "firmware": self._probed_firmware_info.firmware_type.value, + VID: self._usb_info.vid, + PID: self._usb_info.pid, + SERIAL_NUMBER: self._usb_info.serial_number, + MANUFACTURER: self._usb_info.manufacturer, + DESCRIPTION: self._usb_info.description, # For backwards compatibility + PRODUCT: self._usb_info.description, + DEVICE: self._usb_info.device, + FIRMWARE: self._probed_firmware_info.firmware_type.value, + FIRMWARE_VERSION: self._probed_firmware_info.firmware_version, }, ) @@ -148,7 +163,7 @@ class HomeAssistantSkyConnectMultiPanOptionsFlowHandler( ) -> silabs_multiprotocol_addon.SerialPortSettings: """Return the radio serial port settings.""" return silabs_multiprotocol_addon.SerialPortSettings( - device=self.config_entry.data["device"], + device=self.config_entry.data[DEVICE], baudrate="115200", flow_control=True, ) @@ -182,7 +197,8 @@ class HomeAssistantSkyConnectMultiPanOptionsFlowHandler( entry=self.config_entry, data={ **self.config_entry.data, - "firmware": ApplicationType.EZSP.value, + FIRMWARE: ApplicationType.EZSP.value, + FIRMWARE_VERSION: None, }, options=self.config_entry.options, ) @@ -201,15 +217,15 @@ class HomeAssistantSkyConnectOptionsFlowHandler( self._usb_info = get_usb_service_info(self.config_entry) self._hw_variant = HardwareVariant.from_usb_product_name( - self.config_entry.data["product"] + self.config_entry.data[PRODUCT] ) self._hardware_name = self._hw_variant.full_name self._device = self._usb_info.device self._probed_firmware_info = FirmwareInfo( device=self._device, - firmware_type=ApplicationType(self.config_entry.data["firmware"]), - firmware_version=None, + firmware_type=ApplicationType(self.config_entry.data[FIRMWARE]), + firmware_version=self.config_entry.data[FIRMWARE_VERSION], source="guess", owners=[], ) @@ -225,7 +241,8 @@ class HomeAssistantSkyConnectOptionsFlowHandler( entry=self.config_entry, data={ **self.config_entry.data, - "firmware": self._probed_firmware_info.firmware_type.value, + FIRMWARE: self._probed_firmware_info.firmware_type.value, + FIRMWARE_VERSION: self._probed_firmware_info.firmware_version, }, options=self.config_entry.options, ) diff --git a/homeassistant/components/homeassistant_sky_connect/const.py b/homeassistant/components/homeassistant_sky_connect/const.py index cae0b98a25b..70ff047366d 100644 --- a/homeassistant/components/homeassistant_sky_connect/const.py +++ b/homeassistant/components/homeassistant_sky_connect/const.py @@ -7,6 +7,20 @@ from typing import Self DOMAIN = "homeassistant_sky_connect" DOCS_WEB_FLASHER_URL = "https://skyconnect.home-assistant.io/firmware-update/" +NABU_CASA_FIRMWARE_RELEASES_URL = ( + "https://api.github.com/repos/NabuCasa/silabs-firmware-builder/releases/latest" +) + +FIRMWARE = "firmware" +FIRMWARE_VERSION = "firmware_version" +SERIAL_NUMBER = "serial_number" +MANUFACTURER = "manufacturer" +PRODUCT = "product" +DESCRIPTION = "description" +PID = "pid" +VID = "vid" +DEVICE = "device" + @dataclasses.dataclass(frozen=True) class VariantInfo: 
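Editor's note: the new temporarily_stop context managers added to OwningAddon and OwningIntegration above are what let the firmware update entity safely take over the serial port while flashing — it collects the device's current owners, suspends them all on a single AsyncExitStack, and restarts each one when flashing completes or fails. The following is only a minimal, standalone sketch of that composition pattern; FakeOwner and flash_with_owners_stopped are hypothetical stand-ins, not part of the diff.

import asyncio
from contextlib import AsyncExitStack, asynccontextmanager


class FakeOwner:
    """Hypothetical stand-in for OwningAddon / OwningIntegration."""

    def __init__(self, name: str) -> None:
        self.name = name

    @asynccontextmanager
    async def temporarily_stop(self):
        # Stop the owner before handing control back to the caller.
        print(f"stopping {self.name}")
        try:
            yield
        finally:
            # Always restart, even if the body raised an exception.
            print(f"restarting {self.name}")


async def flash_with_owners_stopped(owners: list[FakeOwner]) -> None:
    # Mirrors the shape of _temporarily_stop_hardware_owners in the diff:
    # every owner is entered on one AsyncExitStack, so all of them are
    # restarted (in reverse order) when the block exits.
    async with AsyncExitStack() as stack:
        for owner in owners:
            await stack.enter_async_context(owner.temporarily_stop())
        print("flashing firmware while the port is free")


asyncio.run(
    flash_with_owners_stopped([FakeOwner("otbr add-on"), FakeOwner("zha entry")])
)

Because the restart happens in the finally branch of each context manager, a failed flash still brings the add-ons and integrations back up; the update entity only has to wrap the flashing step, not manage recovery itself.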
diff --git a/homeassistant/components/homeassistant_sky_connect/update.py b/homeassistant/components/homeassistant_sky_connect/update.py new file mode 100644 index 00000000000..43e3f1ca255 --- /dev/null +++ b/homeassistant/components/homeassistant_sky_connect/update.py @@ -0,0 +1,169 @@ +"""Home Assistant SkyConnect firmware update entity.""" + +from __future__ import annotations + +import logging + +import aiohttp + +from homeassistant.components.homeassistant_hardware.coordinator import ( + FirmwareUpdateCoordinator, +) +from homeassistant.components.homeassistant_hardware.update import ( + BaseFirmwareUpdateEntity, + FirmwareUpdateEntityDescription, +) +from homeassistant.components.homeassistant_hardware.util import ( + ApplicationType, + FirmwareInfo, +) +from homeassistant.components.update import UpdateDeviceClass +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from .const import FIRMWARE, FIRMWARE_VERSION, NABU_CASA_FIRMWARE_RELEASES_URL + +_LOGGER = logging.getLogger(__name__) + + +FIRMWARE_ENTITY_DESCRIPTIONS: dict[ + ApplicationType | None, FirmwareUpdateEntityDescription +] = { + ApplicationType.EZSP: FirmwareUpdateEntityDescription( + key="firmware", + display_precision=0, + device_class=UpdateDeviceClass.FIRMWARE, + entity_category=EntityCategory.CONFIG, + version_parser=lambda fw: fw.split(" ", 1)[0], + fw_type="skyconnect_zigbee_ncp", + version_key="ezsp_version", + expected_firmware_type=ApplicationType.EZSP, + firmware_name="EmberZNet", + ), + ApplicationType.SPINEL: FirmwareUpdateEntityDescription( + key="firmware", + display_precision=0, + device_class=UpdateDeviceClass.FIRMWARE, + entity_category=EntityCategory.CONFIG, + version_parser=lambda fw: fw.split("/", 1)[1].split("_", 1)[0], + fw_type="skyconnect_openthread_rcp", + version_key="ot_rcp_version", + expected_firmware_type=ApplicationType.SPINEL, + firmware_name="OpenThread RCP", + ), + None: FirmwareUpdateEntityDescription( + key="firmware", + display_precision=0, + device_class=UpdateDeviceClass.FIRMWARE, + entity_category=EntityCategory.CONFIG, + version_parser=lambda fw: fw, + fw_type=None, + version_key=None, + expected_firmware_type=None, + firmware_name=None, + ), +} + + +def _async_create_update_entity( + hass: HomeAssistant, + config_entry: ConfigEntry, + session: aiohttp.ClientSession, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> FirmwareUpdateEntity: + """Create an update entity that handles firmware type changes.""" + firmware_type = config_entry.data[FIRMWARE] + entity_description = FIRMWARE_ENTITY_DESCRIPTIONS[ + ApplicationType(firmware_type) if firmware_type is not None else None + ] + + entity = FirmwareUpdateEntity( + device=config_entry.data["device"], + config_entry=config_entry, + update_coordinator=FirmwareUpdateCoordinator( + hass, + session, + NABU_CASA_FIRMWARE_RELEASES_URL, + ), + entity_description=entity_description, + ) + + def firmware_type_changed( + old_type: ApplicationType | None, new_type: ApplicationType | None + ) -> None: + """Replace the current entity when the firmware type changes.""" + er.async_get(hass).async_remove(entity.entity_id) + async_add_entities( + [ + _async_create_update_entity( + hass, config_entry, session, 
async_add_entities + ) + ] + ) + + entity.async_on_remove( + entity.add_firmware_type_changed_callback(firmware_type_changed) + ) + + return entity + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up the firmware update config entry.""" + session = async_get_clientsession(hass) + entity = _async_create_update_entity( + hass, config_entry, session, async_add_entities + ) + + async_add_entities([entity]) + + +class FirmwareUpdateEntity(BaseFirmwareUpdateEntity): + """SkyConnect firmware update entity.""" + + bootloader_reset_type = None + + def __init__( + self, + device: str, + config_entry: ConfigEntry, + update_coordinator: FirmwareUpdateCoordinator, + entity_description: FirmwareUpdateEntityDescription, + ) -> None: + """Initialize the SkyConnect firmware update entity.""" + super().__init__(device, config_entry, update_coordinator, entity_description) + + self._attr_unique_id = ( + f"{self._config_entry.data['serial_number']}_{self.entity_description.key}" + ) + + # Use the cached firmware info if it exists + if self._config_entry.data[FIRMWARE] is not None: + self._current_firmware_info = FirmwareInfo( + device=device, + firmware_type=ApplicationType(self._config_entry.data[FIRMWARE]), + firmware_version=self._config_entry.data[FIRMWARE_VERSION], + owners=[], + source="homeassistant_sky_connect", + ) + + @callback + def _firmware_info_callback(self, firmware_info: FirmwareInfo) -> None: + """Handle updated firmware info being pushed by an integration.""" + self.hass.config_entries.async_update_entry( + self._config_entry, + data={ + **self._config_entry.data, + FIRMWARE: firmware_info.firmware_type, + FIRMWARE_VERSION: firmware_info.firmware_version, + }, + ) + super()._firmware_info_callback(firmware_info) diff --git a/homeassistant/components/homeassistant_yellow/__init__.py b/homeassistant/components/homeassistant_yellow/__init__.py index b0837eeedbe..06f908ab61e 100644 --- a/homeassistant/components/homeassistant_yellow/__init__.py +++ b/homeassistant/components/homeassistant_yellow/__init__.py @@ -18,7 +18,7 @@ from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError from homeassistant.helpers import discovery_flow from homeassistant.helpers.hassio import is_hassio -from .const import FIRMWARE, RADIO_DEVICE, ZHA_HW_DISCOVERY_DATA +from .const import FIRMWARE, FIRMWARE_VERSION, RADIO_DEVICE, ZHA_HW_DISCOVERY_DATA _LOGGER = logging.getLogger(__name__) @@ -55,6 +55,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: data=ZHA_HW_DISCOVERY_DATA, ) + await hass.config_entries.async_forward_entry_setups(entry, ["update"]) + return True @@ -87,6 +89,18 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> minor_version=2, ) + if config_entry.minor_version == 2: + # Add a `firmware_version` key + hass.config_entries.async_update_entry( + config_entry, + data={ + **config_entry.data, + FIRMWARE_VERSION: None, + }, + version=1, + minor_version=3, + ) + _LOGGER.debug( "Migration to version %s.%s successful", config_entry.version, diff --git a/homeassistant/components/homeassistant_yellow/config_flow.py b/homeassistant/components/homeassistant_yellow/config_flow.py index b916c6e46ca..5472c346e94 100644 --- a/homeassistant/components/homeassistant_yellow/config_flow.py +++ b/homeassistant/components/homeassistant_yellow/config_flow.py @@ -37,7 +37,14 @@ from homeassistant.config_entries import ( from 
homeassistant.core import HomeAssistant, async_get_hass, callback from homeassistant.helpers import discovery_flow, selector -from .const import DOMAIN, FIRMWARE, RADIO_DEVICE, ZHA_DOMAIN, ZHA_HW_DISCOVERY_DATA +from .const import ( + DOMAIN, + FIRMWARE, + FIRMWARE_VERSION, + RADIO_DEVICE, + ZHA_DOMAIN, + ZHA_HW_DISCOVERY_DATA, +) from .hardware import BOARD_NAME _LOGGER = logging.getLogger(__name__) @@ -55,7 +62,7 @@ class HomeAssistantYellowConfigFlow(BaseFirmwareConfigFlow, domain=DOMAIN): """Handle a config flow for Home Assistant Yellow.""" VERSION = 1 - MINOR_VERSION = 2 + MINOR_VERSION = 3 def __init__(self, *args: Any, **kwargs: Any) -> None: """Instantiate config flow.""" @@ -310,6 +317,7 @@ class HomeAssistantYellowOptionsFlowHandler( data={ **self.config_entry.data, FIRMWARE: self._probed_firmware_info.firmware_type.value, + FIRMWARE_VERSION: self._probed_firmware_info.firmware_version, }, ) diff --git a/homeassistant/components/homeassistant_yellow/const.py b/homeassistant/components/homeassistant_yellow/const.py index 79753ae9b9e..b98b1133d01 100644 --- a/homeassistant/components/homeassistant_yellow/const.py +++ b/homeassistant/components/homeassistant_yellow/const.py @@ -2,7 +2,10 @@ DOMAIN = "homeassistant_yellow" +RADIO_MODEL = "Home Assistant Yellow" +RADIO_MANUFACTURER = "Nabu Casa" RADIO_DEVICE = "/dev/ttyAMA1" + ZHA_HW_DISCOVERY_DATA = { "name": "Yellow", "port": { @@ -14,4 +17,9 @@ ZHA_HW_DISCOVERY_DATA = { } FIRMWARE = "firmware" +FIRMWARE_VERSION = "firmware_version" ZHA_DOMAIN = "zha" + +NABU_CASA_FIRMWARE_RELEASES_URL = ( + "https://api.github.com/repos/NabuCasa/silabs-firmware-builder/releases/latest" +) diff --git a/homeassistant/components/homeassistant_yellow/update.py b/homeassistant/components/homeassistant_yellow/update.py new file mode 100644 index 00000000000..88d4f2912d3 --- /dev/null +++ b/homeassistant/components/homeassistant_yellow/update.py @@ -0,0 +1,172 @@ +"""Home Assistant Yellow firmware update entity.""" + +from __future__ import annotations + +import logging + +import aiohttp + +from homeassistant.components.homeassistant_hardware.coordinator import ( + FirmwareUpdateCoordinator, +) +from homeassistant.components.homeassistant_hardware.update import ( + BaseFirmwareUpdateEntity, + FirmwareUpdateEntityDescription, +) +from homeassistant.components.homeassistant_hardware.util import ( + ApplicationType, + FirmwareInfo, +) +from homeassistant.components.update import UpdateDeviceClass +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from .const import ( + FIRMWARE, + FIRMWARE_VERSION, + NABU_CASA_FIRMWARE_RELEASES_URL, + RADIO_DEVICE, +) + +_LOGGER = logging.getLogger(__name__) + + +FIRMWARE_ENTITY_DESCRIPTIONS: dict[ + ApplicationType | None, FirmwareUpdateEntityDescription +] = { + ApplicationType.EZSP: FirmwareUpdateEntityDescription( + key="firmware", + display_precision=0, + device_class=UpdateDeviceClass.FIRMWARE, + entity_category=EntityCategory.CONFIG, + version_parser=lambda fw: fw.split(" ", 1)[0], + fw_type="yellow_zigbee_ncp", + version_key="ezsp_version", + expected_firmware_type=ApplicationType.EZSP, + firmware_name="EmberZNet", + ), + ApplicationType.SPINEL: FirmwareUpdateEntityDescription( + key="firmware", + 
display_precision=0, + device_class=UpdateDeviceClass.FIRMWARE, + entity_category=EntityCategory.CONFIG, + version_parser=lambda fw: fw.split("/", 1)[1].split("_", 1)[0], + fw_type="yellow_openthread_rcp", + version_key="ot_rcp_version", + expected_firmware_type=ApplicationType.SPINEL, + firmware_name="OpenThread RCP", + ), + None: FirmwareUpdateEntityDescription( + key="firmware", + display_precision=0, + device_class=UpdateDeviceClass.FIRMWARE, + entity_category=EntityCategory.CONFIG, + version_parser=lambda fw: fw, + fw_type=None, + version_key=None, + expected_firmware_type=None, + firmware_name=None, + ), +} + + +def _async_create_update_entity( + hass: HomeAssistant, + config_entry: ConfigEntry, + session: aiohttp.ClientSession, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> FirmwareUpdateEntity: + """Create an update entity that handles firmware type changes.""" + firmware_type = config_entry.data[FIRMWARE] + entity_description = FIRMWARE_ENTITY_DESCRIPTIONS[ + ApplicationType(firmware_type) if firmware_type is not None else None + ] + + entity = FirmwareUpdateEntity( + device=RADIO_DEVICE, + config_entry=config_entry, + update_coordinator=FirmwareUpdateCoordinator( + hass, + session, + NABU_CASA_FIRMWARE_RELEASES_URL, + ), + entity_description=entity_description, + ) + + def firmware_type_changed( + old_type: ApplicationType | None, new_type: ApplicationType | None + ) -> None: + """Replace the current entity when the firmware type changes.""" + er.async_get(hass).async_remove(entity.entity_id) + async_add_entities( + [ + _async_create_update_entity( + hass, config_entry, session, async_add_entities + ) + ] + ) + + entity.async_on_remove( + entity.add_firmware_type_changed_callback(firmware_type_changed) + ) + + return entity + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up the firmware update config entry.""" + session = async_get_clientsession(hass) + entity = _async_create_update_entity( + hass, config_entry, session, async_add_entities + ) + + async_add_entities([entity]) + + +class FirmwareUpdateEntity(BaseFirmwareUpdateEntity): + """Yellow firmware update entity.""" + + bootloader_reset_type = "yellow" # Triggers a GPIO reset + + def __init__( + self, + device: str, + config_entry: ConfigEntry, + update_coordinator: FirmwareUpdateCoordinator, + entity_description: FirmwareUpdateEntityDescription, + ) -> None: + """Initialize the Yellow firmware update entity.""" + super().__init__(device, config_entry, update_coordinator, entity_description) + + self._attr_unique_id = self.entity_description.key + + # Use the cached firmware info if it exists + if self._config_entry.data[FIRMWARE] is not None: + self._current_firmware_info = FirmwareInfo( + device=device, + firmware_type=ApplicationType(self._config_entry.data[FIRMWARE]), + firmware_version=self._config_entry.data[FIRMWARE_VERSION], + owners=[], + source="homeassistant_yellow", + ) + + @callback + def _firmware_info_callback(self, firmware_info: FirmwareInfo) -> None: + """Handle updated firmware info being pushed by an integration.""" + self.hass.config_entries.async_update_entry( + self._config_entry, + data={ + **self._config_entry.data, + FIRMWARE: firmware_info.firmware_type, + FIRMWARE_VERSION: firmware_info.firmware_version, + }, + ) + super()._firmware_info_callback(firmware_info) diff --git a/homeassistant/components/homee/__init__.py b/homeassistant/components/homee/__init__.py index 
92773dae656..9fd88ee40aa 100644 --- a/homeassistant/components/homee/__init__.py +++ b/homeassistant/components/homee/__init__.py @@ -15,9 +15,11 @@ from .const import DOMAIN _LOGGER = logging.getLogger(__name__) PLATFORMS = [ + Platform.BINARY_SENSOR, Platform.BUTTON, Platform.COVER, Platform.LIGHT, + Platform.LOCK, Platform.NUMBER, Platform.SELECT, Platform.SENSOR, diff --git a/homeassistant/components/homee/binary_sensor.py b/homeassistant/components/homee/binary_sensor.py new file mode 100644 index 00000000000..3f5f5c46a29 --- /dev/null +++ b/homeassistant/components/homee/binary_sensor.py @@ -0,0 +1,190 @@ +"""The Homee binary sensor platform.""" + +from pyHomee.const import AttributeType +from pyHomee.model import HomeeAttribute + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . import HomeeConfigEntry +from .entity import HomeeEntity + +PARALLEL_UPDATES = 0 + +BINARY_SENSOR_DESCRIPTIONS: dict[AttributeType, BinarySensorEntityDescription] = { + AttributeType.BATTERY_LOW_ALARM: BinarySensorEntityDescription( + key="battery", + device_class=BinarySensorDeviceClass.BATTERY, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.BLACKOUT_ALARM: BinarySensorEntityDescription( + key="blackout_alarm", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.COALARM: BinarySensorEntityDescription( + key="carbon_monoxide", device_class=BinarySensorDeviceClass.CO + ), + AttributeType.CO2ALARM: BinarySensorEntityDescription( + key="carbon_dioxide", device_class=BinarySensorDeviceClass.PROBLEM + ), + AttributeType.FLOOD_ALARM: BinarySensorEntityDescription( + key="flood", + device_class=BinarySensorDeviceClass.MOISTURE, + ), + AttributeType.HIGH_TEMPERATURE_ALARM: BinarySensorEntityDescription( + key="high_temperature", + device_class=BinarySensorDeviceClass.HEAT, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.LEAK_ALARM: BinarySensorEntityDescription( + key="leak_alarm", + device_class=BinarySensorDeviceClass.PROBLEM, + ), + AttributeType.LOAD_ALARM: BinarySensorEntityDescription( + key="load_alarm", + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.LOCK_STATE: BinarySensorEntityDescription( + key="lock", + device_class=BinarySensorDeviceClass.LOCK, + ), + AttributeType.LOW_TEMPERATURE_ALARM: BinarySensorEntityDescription( + key="low_temperature", + device_class=BinarySensorDeviceClass.COLD, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.MALFUNCTION_ALARM: BinarySensorEntityDescription( + key="malfunction", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.MAXIMUM_ALARM: BinarySensorEntityDescription( + key="maximum", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.MINIMUM_ALARM: BinarySensorEntityDescription( + key="minimum", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.MOTION_ALARM: BinarySensorEntityDescription( + key="motion", + device_class=BinarySensorDeviceClass.MOTION, + ), + AttributeType.MOTOR_BLOCKED_ALARM: BinarySensorEntityDescription( + key="motor_blocked", + 
device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.ON_OFF: BinarySensorEntityDescription( + key="plug", + device_class=BinarySensorDeviceClass.PLUG, + ), + AttributeType.OPEN_CLOSE: BinarySensorEntityDescription( + key="opening", + device_class=BinarySensorDeviceClass.OPENING, + ), + AttributeType.OVER_CURRENT_ALARM: BinarySensorEntityDescription( + key="overcurrent", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.OVERLOAD_ALARM: BinarySensorEntityDescription( + key="overload", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.PRESENCE_ALARM: BinarySensorEntityDescription( + key="presence", + device_class=BinarySensorDeviceClass.PRESENCE, + ), + AttributeType.POWER_SUPPLY_ALARM: BinarySensorEntityDescription( + key="power", + device_class=BinarySensorDeviceClass.POWER, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.RAIN_FALL: BinarySensorEntityDescription( + key="rain", + device_class=BinarySensorDeviceClass.MOISTURE, + ), + AttributeType.REPLACE_FILTER_ALARM: BinarySensorEntityDescription( + key="replace_filter", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.SMOKE_ALARM: BinarySensorEntityDescription( + key="smoke", + device_class=BinarySensorDeviceClass.SMOKE, + ), + AttributeType.STORAGE_ALARM: BinarySensorEntityDescription( + key="storage", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.SURGE_ALARM: BinarySensorEntityDescription( + key="surge", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.TAMPER_ALARM: BinarySensorEntityDescription( + key="tamper", + device_class=BinarySensorDeviceClass.TAMPER, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.VOLTAGE_DROP_ALARM: BinarySensorEntityDescription( + key="voltage_drop", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.WATER_ALARM: BinarySensorEntityDescription( + key="water", + device_class=BinarySensorDeviceClass.MOISTURE, + entity_category=EntityCategory.DIAGNOSTIC, + ), +} + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: HomeeConfigEntry, + async_add_devices: AddConfigEntryEntitiesCallback, +) -> None: + """Add the Homee platform for the binary sensor component.""" + + async_add_devices( + HomeeBinarySensor( + attribute, config_entry, BINARY_SENSOR_DESCRIPTIONS[attribute.type] + ) + for node in config_entry.runtime_data.nodes + for attribute in node.attributes + if attribute.type in BINARY_SENSOR_DESCRIPTIONS and not attribute.editable + ) + + +class HomeeBinarySensor(HomeeEntity, BinarySensorEntity): + """Representation of a Homee binary sensor.""" + + def __init__( + self, + attribute: HomeeAttribute, + entry: HomeeConfigEntry, + description: BinarySensorEntityDescription, + ) -> None: + """Initialize a Homee binary sensor entity.""" + super().__init__(attribute, entry) + + self.entity_description = description + self._attr_translation_key = description.key + + @property + def is_on(self) -> bool: + """Return true if the binary sensor is on.""" + return bool(self._attribute.current_value) diff --git a/homeassistant/components/homee/config_flow.py b/homeassistant/components/homee/config_flow.py index 61d2a3f25a5..1a3c5011f82 
100644 --- a/homeassistant/components/homee/config_flow.py +++ b/homeassistant/components/homee/config_flow.py @@ -52,7 +52,7 @@ class HomeeConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" except HomeeAuthenticationFailedException: errors["base"] = "invalid_auth" - except Exception: # pylint: disable=broad-except + except Exception: _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: diff --git a/homeassistant/components/homee/lock.py b/homeassistant/components/homee/lock.py new file mode 100644 index 00000000000..4cfc34e11fe --- /dev/null +++ b/homeassistant/components/homee/lock.py @@ -0,0 +1,73 @@ +"""The Homee lock platform.""" + +from typing import Any + +from pyHomee.const import AttributeChangedBy, AttributeType + +from homeassistant.components.lock import LockEntity +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . import HomeeConfigEntry +from .entity import HomeeEntity +from .helpers import get_name_for_enum + +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: HomeeConfigEntry, + async_add_devices: AddConfigEntryEntitiesCallback, +) -> None: + """Add the Homee platform for the lock component.""" + + async_add_devices( + HomeeLock(attribute, config_entry) + for node in config_entry.runtime_data.nodes + for attribute in node.attributes + if (attribute.type == AttributeType.LOCK_STATE and attribute.editable) + ) + + +class HomeeLock(HomeeEntity, LockEntity): + """Representation of a Homee lock.""" + + _attr_name = None + + @property + def is_locked(self) -> bool: + """Return if lock is locked.""" + return self._attribute.current_value == 1.0 + + @property + def is_locking(self) -> bool: + """Return if lock is locking.""" + return self._attribute.target_value > self._attribute.current_value + + @property + def is_unlocking(self) -> bool: + """Return if lock is unlocking.""" + return self._attribute.target_value < self._attribute.current_value + + @property + def changed_by(self) -> str: + """Return by whom or what the lock was last changed.""" + changed_id = str(self._attribute.changed_by_id) + changed_by_name = get_name_for_enum( + AttributeChangedBy, self._attribute.changed_by + ) + if self._attribute.changed_by == AttributeChangedBy.USER: + changed_id = self._entry.runtime_data.get_user_by_id( + self._attribute.changed_by_id + ).username + + return f"{changed_by_name}-{changed_id}" + + async def async_lock(self, **kwargs: Any) -> None: + """Lock specified lock. A code to lock the lock with may be specified.""" + await self.async_set_homee_value(1) + + async def async_unlock(self, **kwargs: Any) -> None: + """Unlock specified lock. 
A code to unlock the lock with may be specified.""" + await self.async_set_homee_value(0) diff --git a/homeassistant/components/homee/manifest.json b/homeassistant/components/homee/manifest.json index e4622222be1..3c2a99c30dc 100644 --- a/homeassistant/components/homee/manifest.json +++ b/homeassistant/components/homee/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_push", "loggers": ["homee"], "quality_scale": "bronze", - "requirements": ["pyHomee==1.2.7"] + "requirements": ["pyHomee==1.2.8"] } diff --git a/homeassistant/components/homee/strings.json b/homeassistant/components/homee/strings.json index 8b61cc6d28c..da8357d16bc 100644 --- a/homeassistant/components/homee/strings.json +++ b/homeassistant/components/homee/strings.json @@ -26,6 +26,76 @@ } }, "entity": { + "binary_sensor": { + "blackout_alarm": { + "name": "Blackout" + }, + "carbon_dioxide": { + "name": "Carbon dioxide" + }, + "flood": { + "name": "Flood" + }, + "high_temperature": { + "name": "High temperature" + }, + "leak_alarm": { + "name": "Leak" + }, + "load_alarm": { + "name": "Load", + "state": { + "off": "Normal", + "on": "Overload" + } + }, + "low_temperature": { + "name": "Low temperature" + }, + "malfunction": { + "name": "Malfunction" + }, + "maximum": { + "name": "Maximum level" + }, + "minimum": { + "name": "Minimum level" + }, + "motor_blocked": { + "name": "Motor blocked" + }, + "overcurrent": { + "name": "Overcurrent" + }, + "overload": { + "name": "Overload" + }, + "rain": { + "name": "Rain" + }, + "replace_filter": { + "name": "Replace filter", + "state": { + "on": "Replace" + } + }, + "storage": { + "name": "Storage", + "state": { + "off": "Space available", + "on": "Storage full" + } + }, + "surge": { + "name": "Surge" + }, + "voltage_drop": { + "name": "Voltage drop" + }, + "water": { + "name": "Water" + } + }, "button": { "automatic_mode": { "name": "Automatic mode" diff --git a/homeassistant/components/homekit/accessories.py b/homeassistant/components/homekit/accessories.py index 8d10387e239..0d810d6986d 100644 --- a/homeassistant/components/homekit/accessories.py +++ b/homeassistant/components/homekit/accessories.py @@ -15,6 +15,7 @@ from pyhap.service import Service from pyhap.util import callback as pyhap_callback from homeassistant.components.cover import CoverDeviceClass, CoverEntityFeature +from homeassistant.components.lawn_mower import LawnMowerEntityFeature from homeassistant.components.media_player import MediaPlayerDeviceClass from homeassistant.components.remote import RemoteEntityFeature from homeassistant.components.sensor import SensorDeviceClass @@ -250,6 +251,13 @@ def get_accessory( # noqa: C901 elif state.domain == "vacuum": a_type = "Vacuum" + elif ( + state.domain == "lawn_mower" + and features & LawnMowerEntityFeature.DOCK + and features & LawnMowerEntityFeature.START_MOWING + ): + a_type = "LawnMower" + elif state.domain == "remote" and features & RemoteEntityFeature.ACTIVITY: a_type = "ActivityRemote" diff --git a/homeassistant/components/homekit/config_flow.py b/homeassistant/components/homekit/config_flow.py index 53db7774821..0ef2e8563bc 100644 --- a/homeassistant/components/homekit/config_flow.py +++ b/homeassistant/components/homekit/config_flow.py @@ -106,6 +106,7 @@ SUPPORTED_DOMAINS = [ "sensor", "switch", "vacuum", + "lawn_mower", "water_heater", VALVE_DOMAIN, ] @@ -123,6 +124,7 @@ DEFAULT_DOMAINS = [ REMOTE_DOMAIN, "switch", "vacuum", + "lawn_mower", "water_heater", ] diff --git a/homeassistant/components/homekit/type_switches.py 
b/homeassistant/components/homekit/type_switches.py index 0482a5956ac..8c6fc1ed672 100644 --- a/homeassistant/components/homekit/type_switches.py +++ b/homeassistant/components/homekit/type_switches.py @@ -16,6 +16,12 @@ from pyhap.const import ( from homeassistant.components import button, input_button from homeassistant.components.input_select import ATTR_OPTIONS, SERVICE_SELECT_OPTION +from homeassistant.components.lawn_mower import ( + DOMAIN as LAWN_MOWER_DOMAIN, + SERVICE_DOCK, + SERVICE_START_MOWING, + LawnMowerActivity, +) from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.components.vacuum import ( DOMAIN as VACUUM_DOMAIN, @@ -218,6 +224,29 @@ class Vacuum(Switch): self.char_on.set_value(current_state) +@TYPES.register("LawnMower") +class LawnMower(Switch): + """Generate a Switch accessory.""" + + def set_state(self, value: bool) -> None: + """Move switch state to value if call came from HomeKit.""" + _LOGGER.debug("%s: Set switch state to %s", self.entity_id, value) + state = self.hass.states.get(self.entity_id) + assert state + + service = SERVICE_START_MOWING if value else SERVICE_DOCK + self.async_call_service( + LAWN_MOWER_DOMAIN, service, {ATTR_ENTITY_ID: self.entity_id} + ) + + @callback + def async_update_state(self, new_state: State) -> None: + """Update switch state after state changed.""" + current_state = new_state.state in (LawnMowerActivity.MOWING, STATE_ON) + _LOGGER.debug("%s: Set current state to %s", self.entity_id, current_state) + self.char_on.set_value(current_state) + + class ValveBase(HomeAccessory): """Valve base class.""" diff --git a/homeassistant/components/homekit_controller/media_player.py b/homeassistant/components/homekit_controller/media_player.py index 5315c7c89f3..e3b4a760680 100644 --- a/homeassistant/components/homekit_controller/media_player.py +++ b/homeassistant/components/homekit_controller/media_player.py @@ -83,7 +83,7 @@ class HomeKitTelevision(HomeKitEntity, MediaPlayerEntity): @property def supported_features(self) -> MediaPlayerEntityFeature: """Flag media player features that are supported.""" - features = MediaPlayerEntityFeature(0) + features = MediaPlayerEntityFeature.TURN_OFF | MediaPlayerEntityFeature.TURN_ON if self.service.has(CharacteristicsTypes.ACTIVE_IDENTIFIER): features |= MediaPlayerEntityFeature.SELECT_SOURCE @@ -177,6 +177,14 @@ class HomeKitTelevision(HomeKitEntity, MediaPlayerEntity): return MediaPlayerState.ON + async def async_turn_on(self) -> None: + """Turn the tv on.""" + await self.async_put_characteristics({CharacteristicsTypes.ACTIVE: 1}) + + async def async_turn_off(self) -> None: + """Turn the tv off.""" + await self.async_put_characteristics({CharacteristicsTypes.ACTIVE: 0}) + async def async_media_play(self) -> None: """Send play command.""" if self.state == MediaPlayerState.PLAYING: diff --git a/homeassistant/components/homematic/sensor.py b/homeassistant/components/homematic/sensor.py index 24172e196c1..bdd446d7091 100644 --- a/homeassistant/components/homematic/sensor.py +++ b/homeassistant/components/homematic/sensor.py @@ -178,6 +178,7 @@ SENSOR_DESCRIPTIONS: dict[str, SensorEntityDescription] = { key="WIND_DIRECTION", native_unit_of_measurement=DEGREE, device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, ), "WIND_DIRECTION_RANGE": SensorEntityDescription( key="WIND_DIRECTION_RANGE", diff --git a/homeassistant/components/homematic/strings.json b/homeassistant/components/homematic/strings.json index 
d962a218a4f..78159189db8 100644 --- a/homeassistant/components/homematic/strings.json +++ b/homeassistant/components/homematic/strings.json @@ -2,7 +2,7 @@ "services": { "virtualkey": { "name": "Virtual key", - "description": "Presses a virtual key from CCU/Homegear or simulate keypress.", + "description": "Simulates a keypress (or other valid action) on CCU/Homegear with virtual or device keys.", "fields": { "address": { "name": "Address", @@ -24,7 +24,7 @@ }, "set_variable_value": { "name": "Set variable value", - "description": "Sets the name of a node.", + "description": "Sets the value of a system variable.", "fields": { "entity_id": { "name": "Entity", diff --git a/homeassistant/components/homematicip_cloud/strings.json b/homeassistant/components/homematicip_cloud/strings.json index 228ebc7500e..7b1b08ac4e2 100644 --- a/homeassistant/components/homematicip_cloud/strings.json +++ b/homeassistant/components/homematicip_cloud/strings.json @@ -35,7 +35,7 @@ "services": { "activate_eco_mode_with_duration": { "name": "Activate eco mode with duration", - "description": "Activates eco mode with period.", + "description": "Activates the eco mode for a specified duration.", "fields": { "duration": { "name": "Duration", @@ -49,7 +49,7 @@ }, "activate_eco_mode_with_period": { "name": "Activate eco more with period", - "description": "[%key:component::homematicip_cloud::services::activate_eco_mode_with_duration::description%]", + "description": "Activates the eco mode until a given time.", "fields": { "endtime": { "name": "Endtime", @@ -63,7 +63,7 @@ }, "activate_vacation": { "name": "Activate vacation", - "description": "Activates the vacation mode until the given time.", + "description": "Activates the vacation mode until a given time.", "fields": { "endtime": { "name": "[%key:component::homematicip_cloud::services::activate_eco_mode_with_period::fields::endtime::name%]", diff --git a/homeassistant/components/huawei_lte/config_flow.py b/homeassistant/components/huawei_lte/config_flow.py index 96e160ece7b..4ca9e7531e3 100644 --- a/homeassistant/components/huawei_lte/config_flow.py +++ b/homeassistant/components/huawei_lte/config_flow.py @@ -178,8 +178,8 @@ class ConfigFlowHandler(ConfigFlow, domain=DOMAIN): except Timeout: _LOGGER.warning("Connection timeout", exc_info=True) errors[CONF_URL] = "connection_timeout" - except Exception: # noqa: BLE001 - _LOGGER.warning("Unknown error connecting to device", exc_info=True) + except Exception: + _LOGGER.exception("Unknown error connecting to device") errors[CONF_URL] = "unknown" return conn @@ -188,8 +188,8 @@ class ConfigFlowHandler(ConfigFlow, domain=DOMAIN): try: conn.close() conn.requests_session.close() - except Exception: # noqa: BLE001 - _LOGGER.debug("Disconnect error", exc_info=True) + except Exception: + _LOGGER.exception("Disconnect error") async def async_step_user( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/hue/strings.json b/homeassistant/components/hue/strings.json index 2f7f2e55561..6d2e9054c6f 100644 --- a/homeassistant/components/hue/strings.json +++ b/homeassistant/components/hue/strings.json @@ -11,7 +11,7 @@ } }, "manual": { - "title": "Manual configure a Hue bridge", + "title": "Manually configure a Hue bridge", "data": { "host": "[%key:common::config_flow::data::host%]" }, @@ -46,8 +46,8 @@ "button_2": "Second button", "button_3": "Third button", "button_4": "Fourth button", - "double_buttons_1_3": "First and Third buttons", - "double_buttons_2_4": "Second and Fourth buttons", + 
"double_buttons_1_3": "First and third button", + "double_buttons_2_4": "Second and fourth button", "dim_down": "Dim down", "dim_up": "Dim up", "turn_off": "[%key:common::action::turn_off%]", @@ -102,6 +102,28 @@ } } }, + "light": { + "hue_light": { + "state_attributes": { + "effect": { + "state": { + "candle": "Candle", + "sparkle": "Sparkle", + "glisten": "Glisten", + "sunrise": "Sunrise", + "sunset": "Sunset", + "fire": "Fire", + "prism": "Prism", + "opal": "Opal", + "underwater": "Underwater", + "cosmos": "Cosmos", + "sunbeam": "Sunbeam", + "enchant": "Enchant" + } + } + } + } + }, "sensor": { "zigbee_connectivity": { "name": "Zigbee connectivity", diff --git a/homeassistant/components/hue/v2/light.py b/homeassistant/components/hue/v2/light.py index 4b00299bc9d..757b69c7b7b 100644 --- a/homeassistant/components/hue/v2/light.py +++ b/homeassistant/components/hue/v2/light.py @@ -18,6 +18,7 @@ from homeassistant.components.light import ( ATTR_FLASH, ATTR_TRANSITION, ATTR_XY_COLOR, + EFFECT_OFF, FLASH_SHORT, ColorMode, LightEntity, @@ -39,7 +40,6 @@ from .helpers import ( normalize_hue_transition, ) -EFFECT_NONE = "None" FALLBACK_MIN_KELVIN = 6500 FALLBACK_MAX_KELVIN = 2000 FALLBACK_KELVIN = 5800 # halfway @@ -75,7 +75,7 @@ class HueLight(HueBaseEntity, LightEntity): _fixed_color_mode: ColorMode | None = None entity_description = LightEntityDescription( - key="hue_light", has_entity_name=True, name=None + key="hue_light", translation_key="hue_light", has_entity_name=True, name=None ) def __init__( @@ -118,7 +118,7 @@ class HueLight(HueBaseEntity, LightEntity): if x != TimedEffectStatus.NO_EFFECT ] if len(self._attr_effect_list) > 0: - self._attr_effect_list.insert(0, EFFECT_NONE) + self._attr_effect_list.insert(0, EFFECT_OFF) self._attr_supported_features |= LightEntityFeature.EFFECT @property @@ -211,7 +211,7 @@ class HueLight(HueBaseEntity, LightEntity): if timed_effects := self.resource.timed_effects: if timed_effects.status != TimedEffectStatus.NO_EFFECT: return timed_effects.status.value - return EFFECT_NONE + return EFFECT_OFF async def async_turn_on(self, **kwargs: Any) -> None: """Turn the device on.""" @@ -233,12 +233,12 @@ class HueLight(HueBaseEntity, LightEntity): self._color_temp_active = color_temp is not None flash = kwargs.get(ATTR_FLASH) effect = effect_str = kwargs.get(ATTR_EFFECT) - if effect_str in (EFFECT_NONE, EFFECT_NONE.lower()): - # ignore effect if set to "None" and we have no effect active - # the special effect "None" is only used to stop an active effect + if effect_str == EFFECT_OFF: + # ignore effect if set to "off" and we have no effect active + # the special effect "off" is only used to stop an active effect # but sending it while no effect is active can actually result in issues # https://github.com/home-assistant/core/issues/122165 - effect = None if self.effect == EFFECT_NONE else EffectStatus.NO_EFFECT + effect = None if self.effect == EFFECT_OFF else EffectStatus.NO_EFFECT elif effect_str is not None: # work out if we got a regular effect or timed effect effect = EffectStatus(effect_str) diff --git a/homeassistant/components/humidifier/strings.json b/homeassistant/components/humidifier/strings.json index 753368dc572..436f7df8312 100644 --- a/homeassistant/components/humidifier/strings.json +++ b/homeassistant/components/humidifier/strings.json @@ -89,7 +89,7 @@ "fields": { "mode": { "name": "Mode", - "description": "Operation mode. For example, _normal_, _eco_, or _away_. For a list of possible values, refer to the integration documentation." 
+ "description": "Operation mode. For example, \"normal\", \"eco\", or \"away\". For a list of possible values, refer to the integration documentation." } } }, diff --git a/homeassistant/components/husqvarna_automower/config_flow.py b/homeassistant/components/husqvarna_automower/config_flow.py index 7efed529453..31ca5eef0cd 100644 --- a/homeassistant/components/husqvarna_automower/config_flow.py +++ b/homeassistant/components/husqvarna_automower/config_flow.py @@ -54,7 +54,8 @@ class HusqvarnaConfigFlowHandler( automower_api = AutomowerSession(AsyncConfigFlowAuth(websession, token), tz) try: status_data = await automower_api.get_status() - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") return self.async_abort(reason="unknown") if status_data == {}: return self.async_abort(reason="no_mower_connected") diff --git a/homeassistant/components/husqvarna_automower/coordinator.py b/homeassistant/components/husqvarna_automower/coordinator.py index 819ee41a43d..9456074596a 100644 --- a/homeassistant/components/husqvarna_automower/coordinator.py +++ b/homeassistant/components/husqvarna_automower/coordinator.py @@ -13,7 +13,7 @@ from aioautomower.exceptions import ( HusqvarnaTimeoutError, HusqvarnaWSServerHandshakeError, ) -from aioautomower.model import MowerAttributes +from aioautomower.model import MowerDictionary from aioautomower.session import AutomowerSession from homeassistant.config_entries import ConfigEntry @@ -32,7 +32,7 @@ DEFAULT_RECONNECT_TIME = 2 # Define a default reconnect time type AutomowerConfigEntry = ConfigEntry[AutomowerDataUpdateCoordinator] -class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, MowerAttributes]]): +class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[MowerDictionary]): """Class to manage fetching Husqvarna data.""" config_entry: AutomowerConfigEntry @@ -61,7 +61,7 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, MowerAttrib self._zones_last_update: dict[str, set[str]] = {} self._areas_last_update: dict[str, set[int]] = {} - async def _async_update_data(self) -> dict[str, MowerAttributes]: + async def _async_update_data(self) -> MowerDictionary: """Subscribe for websocket and poll data from the API.""" if not self.ws_connected: await self.api.connect() @@ -84,7 +84,7 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, MowerAttrib return data @callback - def callback(self, ws_data: dict[str, MowerAttributes]) -> None: + def callback(self, ws_data: MowerDictionary) -> None: """Process websocket callbacks and write them to the DataUpdateCoordinator.""" self.async_set_updated_data(ws_data) @@ -119,7 +119,7 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, MowerAttrib "reconnect_task", ) - def _async_add_remove_devices(self, data: dict[str, MowerAttributes]) -> None: + def _async_add_remove_devices(self, data: MowerDictionary) -> None: """Add new device, remove non-existing device.""" current_devices = set(data) @@ -159,9 +159,7 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, MowerAttrib for mower_callback in self.new_devices_callbacks: mower_callback(new_devices) - def _async_add_remove_stay_out_zones( - self, data: dict[str, MowerAttributes] - ) -> None: + def _async_add_remove_stay_out_zones(self, data: MowerDictionary) -> None: """Add new stay-out zones, remove non-existing stay-out zones.""" current_zones = { mower_id: set(mower_data.stay_out_zones.zones) @@ -207,7 +205,7 @@ class 
AutomowerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, MowerAttrib return current_zones - def _async_add_remove_work_areas(self, data: dict[str, MowerAttributes]) -> None: + def _async_add_remove_work_areas(self, data: MowerDictionary) -> None: """Add new work areas, remove non-existing work areas.""" current_areas = { mower_id: set(mower_data.work_areas) diff --git a/homeassistant/components/husqvarna_automower/manifest.json b/homeassistant/components/husqvarna_automower/manifest.json index 0eabf5ec0d6..7f728148be3 100644 --- a/homeassistant/components/husqvarna_automower/manifest.json +++ b/homeassistant/components/husqvarna_automower/manifest.json @@ -8,5 +8,5 @@ "iot_class": "cloud_push", "loggers": ["aioautomower"], "quality_scale": "silver", - "requirements": ["aioautomower==2025.1.1"] + "requirements": ["aioautomower==2025.3.2"] } diff --git a/homeassistant/components/husqvarna_automower/number.py b/homeassistant/components/husqvarna_automower/number.py index cdcf4b45a2d..9ed00113d4b 100644 --- a/homeassistant/components/husqvarna_automower/number.py +++ b/homeassistant/components/husqvarna_automower/number.py @@ -44,7 +44,7 @@ async def async_set_work_area_cutting_height( ) -> None: """Set cutting height for work area.""" await coordinator.api.commands.workarea_settings( - mower_id, int(cheight), work_area_id + mower_id, work_area_id, cutting_height=int(cheight) ) diff --git a/homeassistant/components/husqvarna_automower/sensor.py b/homeassistant/components/husqvarna_automower/sensor.py index 2e1d4041e5a..75af24ee0ee 100644 --- a/homeassistant/components/husqvarna_automower/sensor.py +++ b/homeassistant/components/husqvarna_automower/sensor.py @@ -295,6 +295,18 @@ MOWER_SENSOR_TYPES: tuple[AutomowerSensorEntityDescription, ...] = ( exists_fn=lambda data: data.statistics.cutting_blade_usage_time is not None, value_fn=attrgetter("statistics.cutting_blade_usage_time"), ), + AutomowerSensorEntityDescription( + key="downtime", + translation_key="downtime", + state_class=SensorStateClass.TOTAL, + device_class=SensorDeviceClass.DURATION, + entity_registry_enabled_default=False, + native_unit_of_measurement=UnitOfTime.SECONDS, + suggested_display_precision=0, + suggested_unit_of_measurement=UnitOfTime.HOURS, + exists_fn=lambda data: data.statistics.downtime is not None, + value_fn=attrgetter("statistics.downtime"), + ), AutomowerSensorEntityDescription( key="total_charging_time", translation_key="total_charging_time", @@ -367,6 +379,18 @@ MOWER_SENSOR_TYPES: tuple[AutomowerSensorEntityDescription, ...] 
= ( exists_fn=lambda data: data.statistics.total_drive_distance is not None, value_fn=attrgetter("statistics.total_drive_distance"), ), + AutomowerSensorEntityDescription( + key="uptime", + translation_key="uptime", + state_class=SensorStateClass.TOTAL, + device_class=SensorDeviceClass.DURATION, + entity_registry_enabled_default=False, + native_unit_of_measurement=UnitOfTime.SECONDS, + suggested_display_precision=0, + suggested_unit_of_measurement=UnitOfTime.HOURS, + exists_fn=lambda data: data.statistics.uptime is not None, + value_fn=attrgetter("statistics.uptime"), + ), AutomowerSensorEntityDescription( key="next_start_timestamp", translation_key="next_start_timestamp", diff --git a/homeassistant/components/husqvarna_automower/strings.json b/homeassistant/components/husqvarna_automower/strings.json index 9bd0bb06b3e..35ce342867f 100644 --- a/homeassistant/components/husqvarna_automower/strings.json +++ b/homeassistant/components/husqvarna_automower/strings.json @@ -221,6 +221,9 @@ "cutting_blade_usage_time": { "name": "Cutting blade usage time" }, + "downtime": { + "name": "Downtime" + }, "restricted_reason": { "name": "Restricted reason", "state": { @@ -263,6 +266,9 @@ "demo": "Demo" } }, + "uptime": { + "name": "Uptime" + }, "work_area": { "name": "Work area", "state": { diff --git a/homeassistant/components/hydrawise/manifest.json b/homeassistant/components/hydrawise/manifest.json index 73423882e4a..0c355c34a71 100644 --- a/homeassistant/components/hydrawise/manifest.json +++ b/homeassistant/components/hydrawise/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/hydrawise", "iot_class": "cloud_polling", "loggers": ["pydrawise"], - "requirements": ["pydrawise==2025.2.0"] + "requirements": ["pydrawise==2025.3.0"] } diff --git a/homeassistant/components/idasen_desk/strings.json b/homeassistant/components/idasen_desk/strings.json index 7486973638b..ccac87a75e0 100644 --- a/homeassistant/components/idasen_desk/strings.json +++ b/homeassistant/components/idasen_desk/strings.json @@ -7,7 +7,7 @@ "address": "Device" }, "data_description": { - "address": "The bluetooth device for the desk." + "address": "The Bluetooth device for the desk." 
} } }, diff --git a/homeassistant/components/imgw_pib/__init__.py b/homeassistant/components/imgw_pib/__init__.py index f9524316570..4bceee51f8e 100644 --- a/homeassistant/components/imgw_pib/__init__.py +++ b/homeassistant/components/imgw_pib/__init__.py @@ -38,7 +38,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: ImgwPibConfigEntry) -> b hydrological_details=False, ) except (ClientError, TimeoutError, ApiError) as err: - raise ConfigEntryNotReady from err + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="cannot_connect", + translation_placeholders={ + "entry": entry.title, + "error": repr(err), + }, + ) from err coordinator = ImgwPibDataUpdateCoordinator(hass, entry, imgwpib, station_id) await coordinator.async_config_entry_first_refresh() diff --git a/homeassistant/components/imgw_pib/config_flow.py b/homeassistant/components/imgw_pib/config_flow.py index 558528fcbef..805bfa2ccb3 100644 --- a/homeassistant/components/imgw_pib/config_flow.py +++ b/homeassistant/components/imgw_pib/config_flow.py @@ -50,7 +50,7 @@ class ImgwPibFlowHandler(ConfigFlow, domain=DOMAIN): hydrological_data = await imgwpib.get_hydrological_data() except (ClientError, TimeoutError, ApiError): errors["base"] = "cannot_connect" - except Exception: # pylint: disable=broad-except + except Exception: _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: diff --git a/homeassistant/components/imgw_pib/coordinator.py b/homeassistant/components/imgw_pib/coordinator.py index fbe470ca953..f74878d672c 100644 --- a/homeassistant/components/imgw_pib/coordinator.py +++ b/homeassistant/components/imgw_pib/coordinator.py @@ -63,4 +63,11 @@ class ImgwPibDataUpdateCoordinator(DataUpdateCoordinator[HydrologicalData]): try: return await self.imgwpib.get_hydrological_data() except ApiError as err: - raise UpdateFailed(err) from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error", + translation_placeholders={ + "entry": self.config_entry.title, + "error": repr(err), + }, + ) from err diff --git a/homeassistant/components/imgw_pib/manifest.json b/homeassistant/components/imgw_pib/manifest.json index 0ecc1b4b7d0..3d8b34055fd 100644 --- a/homeassistant/components/imgw_pib/manifest.json +++ b/homeassistant/components/imgw_pib/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/imgw_pib", "iot_class": "cloud_polling", - "requirements": ["imgw_pib==1.0.9"] + "requirements": ["imgw_pib==1.0.10"] } diff --git a/homeassistant/components/imgw_pib/sensor.py b/homeassistant/components/imgw_pib/sensor.py index 33b82bbb43b..7871006b2ae 100644 --- a/homeassistant/components/imgw_pib/sensor.py +++ b/homeassistant/components/imgw_pib/sensor.py @@ -24,7 +24,8 @@ from .const import DOMAIN from .coordinator import ImgwPibConfigEntry, ImgwPibDataUpdateCoordinator from .entity import ImgwPibEntity -PARALLEL_UPDATES = 1 +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 @dataclass(frozen=True, kw_only=True) diff --git a/homeassistant/components/imgw_pib/strings.json b/homeassistant/components/imgw_pib/strings.json index 9a17dcf7087..33cd3cb3917 100644 --- a/homeassistant/components/imgw_pib/strings.json +++ b/homeassistant/components/imgw_pib/strings.json @@ -4,6 +4,9 @@ "user": { "data": { "station_id": "Hydrological station" + }, + "data_description": { + "station_id": "Select a hydrological station from the list." 
} } }, @@ -25,5 +28,13 @@ "name": "Water temperature" } } + }, + "exceptions": { + "cannot_connect": { + "message": "An error occurred while connecting to the IMGW-PIB API for {entry}: {error}" + }, + "update_error": { + "message": "An error occurred while retrieving data from the IMGW-PIB API for {entry}: {error}" + } } } diff --git a/homeassistant/components/improv_ble/__init__.py b/homeassistant/components/improv_ble/__init__.py index 985684cb5b8..ff40b65a8d0 100644 --- a/homeassistant/components/improv_ble/__init__.py +++ b/homeassistant/components/improv_ble/__init__.py @@ -1 +1,11 @@ """The Improv BLE integration.""" + +from __future__ import annotations + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Set up improv_ble from a config entry.""" + raise NotImplementedError diff --git a/homeassistant/components/improv_ble/config_flow.py b/homeassistant/components/improv_ble/config_flow.py index 22f2bf3623c..0dcefba6428 100644 --- a/homeassistant/components/improv_ble/config_flow.py +++ b/homeassistant/components/improv_ble/config_flow.py @@ -83,12 +83,9 @@ class ImprovBLEConfigFlow(ConfigFlow, domain=DOMAIN): self._discovery_info = self._discovered_devices[address] return await self.async_step_start_improv() - current_addresses = self._async_current_ids() for discovery in bluetooth.async_discovered_service_info(self.hass): - if ( - discovery.address in current_addresses - or discovery.address in self._discovered_devices - or not device_filter(discovery.advertisement) + if discovery.address in self._discovered_devices or not device_filter( + discovery.advertisement ): continue self._discovered_devices[discovery.address] = discovery @@ -364,6 +361,18 @@ class ImprovBLEConfigFlow(ConfigFlow, domain=DOMAIN): assert self._provision_result is not None result = self._provision_result + if result["type"] == "abort" and result["reason"] in ( + "provision_successful", + "provision_successful_url", + ): + # Delete ignored config entry, if it exists + address = self.context["unique_id"] + current_entries = self._async_current_entries(include_ignore=True) + for entry in current_entries: + if entry.unique_id == address: + _LOGGER.debug("Removing ignored entry: %s", entry) + await self.hass.config_entries.async_remove(entry.entry_id) + break self._provision_result = None return result diff --git a/homeassistant/components/incomfort/config_flow.py b/homeassistant/components/incomfort/config_flow.py index 875bc25bd2f..027c3ad4691 100644 --- a/homeassistant/components/incomfort/config_flow.py +++ b/homeassistant/components/incomfort/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Mapping +import logging from typing import Any from incomfortclient import InvalidGateway, InvalidHeaterList @@ -31,6 +32,7 @@ from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo from .const import CONF_LEGACY_SETPOINT_STATUS, DOMAIN from .coordinator import InComfortConfigEntry, async_connect_gateway +_LOGGER = logging.getLogger(__name__) TITLE = "Intergas InComfort/Intouch Lan2RF gateway" CONFIG_SCHEMA = vol.Schema( @@ -88,7 +90,8 @@ async def async_try_connect_gateway( return {"base": "no_heaters"} except TimeoutError: return {"base": "timeout_error"} - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") return {"base": "unknown"} return None diff --git 
a/homeassistant/components/incomfort/manifest.json b/homeassistant/components/incomfort/manifest.json index d02b1d27554..825f198dd30 100644 --- a/homeassistant/components/incomfort/manifest.json +++ b/homeassistant/components/incomfort/manifest.json @@ -10,5 +10,6 @@ "documentation": "https://www.home-assistant.io/integrations/incomfort", "iot_class": "local_polling", "loggers": ["incomfortclient"], + "quality_scale": "platinum", "requirements": ["incomfort-client==0.6.7"] } diff --git a/homeassistant/components/incomfort/quality_scale.yaml b/homeassistant/components/incomfort/quality_scale.yaml new file mode 100644 index 00000000000..f5af3c9d061 --- /dev/null +++ b/homeassistant/components/incomfort/quality_scale.yaml @@ -0,0 +1,77 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + No actions implemented. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + No actions implemented. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: done + comment: | + Entities are set up and updated through the data coordinator. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + # Silver + config-entry-unloading: done + log-when-unavailable: done + entity-unavailable: done + action-exceptions: done + reauthentication-flow: done + parallel-updates: done + test-coverage: done + integration-owner: done + docs-installation-parameters: done + docs-configuration-parameters: done + + # Gold + entity-translations: done + entity-device-class: done + devices: done + entity-category: done + entity-disabled-by-default: done + discovery: done + stale-devices: + status: exempt + comment: > + There is a maximum of 3 heaters that can be discovered by the gateway. + The user must manually remove any heater devices that have been replaced. + diagnostics: done + exception-translations: done + icon-translations: done + reconfiguration-flow: done + dynamic-devices: done + discovery-update-info: done + repair-issues: + status: exempt + comment: | + No current issues to repair.
+ docs-use-cases: done + docs-supported-devices: done + docs-supported-functions: done + docs-data-update: done + docs-known-limitations: + status: done + comment: There are no known limitations. + docs-troubleshooting: done + docs-examples: done + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/inkbird/manifest.json b/homeassistant/components/inkbird/manifest.json index acc7414edac..aaa9c4b3473 100644 --- a/homeassistant/components/inkbird/manifest.json +++ b/homeassistant/components/inkbird/manifest.json @@ -21,6 +21,18 @@ { "local_name": "tps", "connectable": false + }, + { + "local_name": "ITH-11-B", + "connectable": false + }, + { + "local_name": "ITH-13-B", + "connectable": false + }, + { + "local_name": "ITH-21-B", + "connectable": false } ], "codeowners": ["@bdraco"], @@ -28,5 +40,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/inkbird", "iot_class": "local_push", - "requirements": ["inkbird-ble==0.7.1"] + "requirements": ["inkbird-ble==0.9.0"] } diff --git a/homeassistant/components/iometer/__init__.py b/homeassistant/components/iometer/__init__.py index bbf046e70e9..feb7ce9b8cf 100644 --- a/homeassistant/components/iometer/__init__.py +++ b/homeassistant/components/iometer/__init__.py @@ -12,7 +12,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession from .coordinator import IOmeterConfigEntry, IOMeterCoordinator -PLATFORMS: list[Platform] = [Platform.SENSOR] +PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.SENSOR] async def async_setup_entry(hass: HomeAssistant, entry: IOmeterConfigEntry) -> bool: diff --git a/homeassistant/components/iometer/binary_sensor.py b/homeassistant/components/iometer/binary_sensor.py new file mode 100644 index 00000000000..f443c4ae94a --- /dev/null +++ b/homeassistant/components/iometer/binary_sensor.py @@ -0,0 +1,87 @@ +"""IOmeter binary sensor.""" + +from collections.abc import Callable +from dataclasses import dataclass + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from .coordinator import IOMeterCoordinator, IOmeterData +from .entity import IOmeterEntity + + +@dataclass(frozen=True, kw_only=True) +class IOmeterBinarySensorDescription(BinarySensorEntityDescription): + """Describes Iometer binary sensor entity.""" + + value_fn: Callable[[IOmeterData], bool | None] + + +SENSOR_TYPES: list[IOmeterBinarySensorDescription] = [ + IOmeterBinarySensorDescription( + key="connection_status", + translation_key="connection_status", + device_class=BinarySensorDeviceClass.CONNECTIVITY, + entity_registry_enabled_default=False, + value_fn=lambda data: ( + data.status.device.core.connection_status == "connected" + if data.status.device.core.connection_status is not None + else None + ), + ), + IOmeterBinarySensorDescription( + key="attachment_status", + translation_key="attachment_status", + device_class=BinarySensorDeviceClass.CONNECTIVITY, + entity_registry_enabled_default=False, + value_fn=lambda data: ( + data.status.device.core.attachment_status == "attached" + if data.status.device.core.attachment_status is not None + else None + ), + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry:
ConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up the Sensors.""" + coordinator: IOMeterCoordinator = config_entry.runtime_data + + async_add_entities( + IOmeterBinarySensor( + coordinator=coordinator, + description=description, + ) + for description in SENSOR_TYPES + ) + + +class IOmeterBinarySensor(IOmeterEntity, BinarySensorEntity): + """Defines a IOmeter binary sensor.""" + + entity_description: IOmeterBinarySensorDescription + + def __init__( + self, + coordinator: IOMeterCoordinator, + description: IOmeterBinarySensorDescription, + ) -> None: + """Initialize the sensor.""" + super().__init__(coordinator) + self.entity_description = description + self._attr_unique_id = f"{coordinator.identifier}_{description.key}" + + @property + def is_on(self) -> bool | None: + """Return the binary sensor state.""" + return self.entity_description.value_fn(self.coordinator.data) diff --git a/homeassistant/components/iometer/coordinator.py b/homeassistant/components/iometer/coordinator.py index 708983fb28e..4050341151b 100644 --- a/homeassistant/components/iometer/coordinator.py +++ b/homeassistant/components/iometer/coordinator.py @@ -8,6 +8,7 @@ from iometer import IOmeterClient, IOmeterConnectionError, Reading, Status from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import DOMAIN @@ -31,6 +32,7 @@ class IOMeterCoordinator(DataUpdateCoordinator[IOmeterData]): config_entry: IOmeterConfigEntry client: IOmeterClient + current_fw_version: str = "" def __init__( self, @@ -58,4 +60,17 @@ class IOMeterCoordinator(DataUpdateCoordinator[IOmeterData]): except IOmeterConnectionError as error: raise UpdateFailed(f"Error communicating with IOmeter: {error}") from error + fw_version = f"{status.device.core.version}/{status.device.bridge.version}" + if self.current_fw_version and fw_version != self.current_fw_version: + device_registry = dr.async_get(self.hass) + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, status.device.id)} + ) + assert device_entry + device_registry.async_update_device( + device_entry.id, + sw_version=fw_version, + ) + self.current_fw_version = fw_version + return IOmeterData(reading=reading, status=status) diff --git a/homeassistant/components/iometer/entity.py b/homeassistant/components/iometer/entity.py index 86494857e18..a52ef1c66ed 100644 --- a/homeassistant/components/iometer/entity.py +++ b/homeassistant/components/iometer/entity.py @@ -20,5 +20,5 @@ class IOmeterEntity(CoordinatorEntity[IOMeterCoordinator]): identifiers={(DOMAIN, status.device.id)}, manufacturer="IOmeter GmbH", model="IOmeter", - sw_version=f"{status.device.core.version}/{status.device.bridge.version}", + sw_version=coordinator.current_fw_version, ) diff --git a/homeassistant/components/iometer/strings.json b/homeassistant/components/iometer/strings.json index 31deb16aa9c..b3878dd1b53 100644 --- a/homeassistant/components/iometer/strings.json +++ b/homeassistant/components/iometer/strings.json @@ -60,6 +60,14 @@ "wifi_rssi": { "name": "Signal strength Wi-Fi" } + }, + "binary_sensor": { + "connection_status": { + "name": "Core/Bridge connection status" + }, + "attachment_status": { + "name": "Core attachment status" + } } } } diff --git a/homeassistant/components/iqvia/strings.json b/homeassistant/components/iqvia/strings.json index 
5dc0dea53d5..a0697a6c210 100644 --- a/homeassistant/components/iqvia/strings.json +++ b/homeassistant/components/iqvia/strings.json @@ -4,7 +4,7 @@ "user": { "description": "Fill out your U.S. or Canadian ZIP code.", "data": { - "zip_code": "ZIP Code" + "zip_code": "ZIP code" } } }, diff --git a/homeassistant/components/ista_ecotrend/sensor.py b/homeassistant/components/ista_ecotrend/sensor.py index ee54e502c26..0a8ed6e9ddb 100644 --- a/homeassistant/components/ista_ecotrend/sensor.py +++ b/homeassistant/components/ista_ecotrend/sensor.py @@ -8,6 +8,7 @@ import datetime from enum import StrEnum import logging +from homeassistant.components.recorder.models import StatisticMeanType from homeassistant.components.recorder.models.statistics import ( StatisticData, StatisticMetaData, @@ -270,7 +271,7 @@ class IstaSensor(CoordinatorEntity[IstaCoordinator], SensorEntity): ] metadata: StatisticMetaData = { - "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": f"{self.device_entry.name} {self.name}", "source": DOMAIN, diff --git a/homeassistant/components/isy994/sensor.py b/homeassistant/components/isy994/sensor.py index 2655f4d3c4e..2d27f4602c6 100644 --- a/homeassistant/components/isy994/sensor.py +++ b/homeassistant/components/isy994/sensor.py @@ -97,9 +97,9 @@ ISY_CONTROL_TO_DEVICE_CLASS = { "WEIGHT": SensorDeviceClass.WEIGHT, "WINDCH": SensorDeviceClass.TEMPERATURE, } -ISY_CONTROL_TO_STATE_CLASS = { - control: SensorStateClass.MEASUREMENT for control in ISY_CONTROL_TO_DEVICE_CLASS -} +ISY_CONTROL_TO_STATE_CLASS = dict.fromkeys( + ISY_CONTROL_TO_DEVICE_CLASS, SensorStateClass.MEASUREMENT +) ISY_CONTROL_TO_ENTITY_CATEGORY = { PROP_RAMP_RATE: EntityCategory.DIAGNOSTIC, PROP_ON_LEVEL: EntityCategory.DIAGNOSTIC, diff --git a/homeassistant/components/jellyfin/client_wrapper.py b/homeassistant/components/jellyfin/client_wrapper.py index ab5d5e7d7f8..91fe0885e4c 100644 --- a/homeassistant/components/jellyfin/client_wrapper.py +++ b/homeassistant/components/jellyfin/client_wrapper.py @@ -97,16 +97,27 @@ def get_artwork_url( client: JellyfinClient, item: dict[str, Any], max_width: int = 600 ) -> str | None: """Find a suitable thumbnail for an item.""" - artwork_id: str = item["Id"] - artwork_type = "Primary" + artwork_id: str | None = None + artwork_type: str | None = None parent_backdrop_id: str | None = item.get("ParentBackdropItemId") - if "Backdrop" in item[ITEM_KEY_IMAGE_TAGS]: + if "AlbumPrimaryImageTag" in item: + # jellyfin_apiclient_python doesn't support passing a specific tag to `.artwork`, + # so we don't use the actual value of AlbumPrimaryImageTag. + # However, its mere presence tells us that the album does have primary artwork, + # and the resulting URL will pull the primary album art even if the tag is not specified. 
+ artwork_type = "Primary" + artwork_id = item["AlbumId"] + elif "Backdrop" in item[ITEM_KEY_IMAGE_TAGS]: artwork_type = "Backdrop" + artwork_id = item["Id"] elif parent_backdrop_id: artwork_type = "Backdrop" artwork_id = parent_backdrop_id - elif "Primary" not in item[ITEM_KEY_IMAGE_TAGS]: + elif "Primary" in item[ITEM_KEY_IMAGE_TAGS]: + artwork_type = "Primary" + artwork_id = item["Id"] + else: return None return str(client.jellyfin.artwork(artwork_id, artwork_type, max_width)) diff --git a/homeassistant/components/jewish_calendar/__init__.py b/homeassistant/components/jewish_calendar/__init__.py index 823e9bd59be..47d60d74938 100644 --- a/homeassistant/components/jewish_calendar/__init__.py +++ b/homeassistant/components/jewish_calendar/__init__.py @@ -3,6 +3,7 @@ from __future__ import annotations from functools import partial +import logging from hdate import Location @@ -14,7 +15,9 @@ from homeassistant.const import ( CONF_TIME_ZONE, Platform, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import config_validation as cv, entity_registry as er +from homeassistant.helpers.typing import ConfigType from .const import ( CONF_CANDLE_LIGHT_MINUTES, @@ -24,10 +27,21 @@ from .const import ( DEFAULT_DIASPORA, DEFAULT_HAVDALAH_OFFSET_MINUTES, DEFAULT_LANGUAGE, + DOMAIN, ) from .entity import JewishCalendarConfigEntry, JewishCalendarData +from .service import async_setup_services +_LOGGER = logging.getLogger(__name__) PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.SENSOR] +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) + + +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up the Jewish Calendar service.""" + async_setup_services(hass) + + return True async def async_setup_entry( @@ -80,3 +94,49 @@ async def async_unload_entry( ) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS) + + +async def async_migrate_entry( + hass: HomeAssistant, config_entry: JewishCalendarConfigEntry +) -> bool: + """Migrate old entry.""" + + _LOGGER.debug("Migrating from version %s", config_entry.version) + + @callback + def update_unique_id( + entity_entry: er.RegistryEntry, + ) -> dict[str, str] | None: + """Update unique ID of entity entry.""" + key_translations = { + "first_light": "alot_hashachar", + "talit": "talit_and_tefillin", + "sunrise": "netz_hachama", + "gra_end_shma": "sof_zman_shema_gra", + "mga_end_shma": "sof_zman_shema_mga", + "gra_end_tfila": "sof_zman_tfilla_gra", + "mga_end_tfila": "sof_zman_tfilla_mga", + "midday": "chatzot_hayom", + "big_mincha": "mincha_gedola", + "small_mincha": "mincha_ketana", + "plag_mincha": "plag_hamincha", + "sunset": "shkia", + "first_stars": "tset_hakohavim_tsom", + "three_stars": "tset_hakohavim_shabbat", + } + old_keys = tuple(key_translations.keys()) + if entity_entry.unique_id.endswith(old_keys): + old_key = entity_entry.unique_id.split("-")[1] + new_unique_id = f"{config_entry.entry_id}-{key_translations[old_key]}" + return {"new_unique_id": new_unique_id} + return None + + if config_entry.version > 1: + # This means the user has downgraded from a future version + return False + + if config_entry.version == 1: + await er.async_migrate_entries(hass, config_entry.entry_id, update_unique_id) + hass.config_entries.async_update_entry(config_entry, version=2) + + return True diff --git a/homeassistant/components/jewish_calendar/binary_sensor.py 
b/homeassistant/components/jewish_calendar/binary_sensor.py index 5ff3171b7de..f33d79a01f5 100644 --- a/homeassistant/components/jewish_calendar/binary_sensor.py +++ b/homeassistant/components/jewish_calendar/binary_sensor.py @@ -5,9 +5,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass import datetime as dt -from datetime import datetime -import hdate from hdate.zmanim import Zmanim from homeassistant.components.binary_sensor import ( @@ -27,7 +25,7 @@ from .entity import JewishCalendarConfigEntry, JewishCalendarEntity class JewishCalendarBinarySensorMixIns(BinarySensorEntityDescription): """Binary Sensor description mixin class for Jewish Calendar.""" - is_on: Callable[[Zmanim], bool] = lambda _: False + is_on: Callable[[Zmanim, dt.datetime], bool] = lambda _, __: False @dataclass(frozen=True) @@ -42,18 +40,18 @@ BINARY_SENSORS: tuple[JewishCalendarBinarySensorEntityDescription, ...] = ( key="issur_melacha_in_effect", name="Issur Melacha in Effect", icon="mdi:power-plug-off", - is_on=lambda state: bool(state.issur_melacha_in_effect), + is_on=lambda state, now: bool(state.issur_melacha_in_effect(now)), ), JewishCalendarBinarySensorEntityDescription( key="erev_shabbat_hag", name="Erev Shabbat/Hag", - is_on=lambda state: bool(state.erev_shabbat_chag), + is_on=lambda state, now: bool(state.erev_shabbat_chag(now)), entity_registry_enabled_default=False, ), JewishCalendarBinarySensorEntityDescription( key="motzei_shabbat_hag", name="Motzei Shabbat/Hag", - is_on=lambda state: bool(state.motzei_shabbat_chag), + is_on=lambda state, now: bool(state.motzei_shabbat_chag(now)), entity_registry_enabled_default=False, ), ) @@ -84,16 +82,16 @@ class JewishCalendarBinarySensor(JewishCalendarEntity, BinarySensorEntity): def is_on(self) -> bool: """Return true if sensor is on.""" zmanim = self._get_zmanim() - return self.entity_description.is_on(zmanim) + return self.entity_description.is_on(zmanim, dt_util.now()) def _get_zmanim(self) -> Zmanim: """Return the Zmanim object for now().""" - return hdate.Zmanim( - date=dt_util.now(), + return Zmanim( + date=dt.date.today(), location=self._location, candle_lighting_offset=self._candle_lighting_offset, havdalah_offset=self._havdalah_offset, - hebrew=self._hebrew, + language=self._language, ) async def async_added_to_hass(self) -> None: @@ -109,7 +107,7 @@ class JewishCalendarBinarySensor(JewishCalendarEntity, BinarySensorEntity): return await super().async_will_remove_from_hass() @callback - def _update(self, now: datetime | None = None) -> None: + def _update(self, now: dt.datetime | None = None) -> None: """Update the state of the sensor.""" self._update_unsub = None self._schedule_update() @@ -119,7 +117,7 @@ class JewishCalendarBinarySensor(JewishCalendarEntity, BinarySensorEntity): """Schedule the next update of the sensor.""" now = dt_util.now() zmanim = self._get_zmanim() - update = zmanim.zmanim["sunrise"] + dt.timedelta(days=1) + update = zmanim.netz_hachama.local + dt.timedelta(days=1) candle_lighting = zmanim.candle_lighting if candle_lighting is not None and now < candle_lighting < update: update = candle_lighting diff --git a/homeassistant/components/jewish_calendar/config_flow.py b/homeassistant/components/jewish_calendar/config_flow.py index a2eadbf57bd..23bcb23435b 100644 --- a/homeassistant/components/jewish_calendar/config_flow.py +++ b/homeassistant/components/jewish_calendar/config_flow.py @@ -86,7 +86,7 @@ def _get_data_schema(hass: HomeAssistant) -> vol.Schema: class 
JewishCalendarConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Jewish calendar.""" - VERSION = 1 + VERSION = 2 @staticmethod @callback diff --git a/homeassistant/components/jewish_calendar/const.py b/homeassistant/components/jewish_calendar/const.py index 4af76a8927b..0d5455fcd86 100644 --- a/homeassistant/components/jewish_calendar/const.py +++ b/homeassistant/components/jewish_calendar/const.py @@ -2,6 +2,9 @@ DOMAIN = "jewish_calendar" +ATTR_DATE = "date" +ATTR_NUSACH = "nusach" + CONF_DIASPORA = "diaspora" CONF_CANDLE_LIGHT_MINUTES = "candle_lighting_minutes_before_sunset" CONF_HAVDALAH_OFFSET_MINUTES = "havdalah_minutes_after_sunset" @@ -11,3 +14,5 @@ DEFAULT_CANDLE_LIGHT = 18 DEFAULT_DIASPORA = False DEFAULT_HAVDALAH_OFFSET_MINUTES = 0 DEFAULT_LANGUAGE = "english" + +SERVICE_COUNT_OMER = "count_omer" diff --git a/homeassistant/components/jewish_calendar/entity.py b/homeassistant/components/jewish_calendar/entity.py index 1d2a6e45c0a..2c031f0d160 100644 --- a/homeassistant/components/jewish_calendar/entity.py +++ b/homeassistant/components/jewish_calendar/entity.py @@ -3,6 +3,7 @@ from dataclasses import dataclass from hdate import Location +from hdate.translator import Language from homeassistant.config_entries import ConfigEntry from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo @@ -17,7 +18,7 @@ type JewishCalendarConfigEntry = ConfigEntry[JewishCalendarData] class JewishCalendarData: """Jewish Calendar runtime dataclass.""" - language: str + language: Language diaspora: bool location: Location candle_lighting_offset: int @@ -43,7 +44,6 @@ class JewishCalendarEntity(Entity): ) data = config_entry.runtime_data self._location = data.location - self._hebrew = data.language == "hebrew" self._language = data.language self._candle_lighting_offset = data.candle_lighting_offset self._havdalah_offset = data.havdalah_offset diff --git a/homeassistant/components/jewish_calendar/icons.json b/homeassistant/components/jewish_calendar/icons.json new file mode 100644 index 00000000000..24b922df7a2 --- /dev/null +++ b/homeassistant/components/jewish_calendar/icons.json @@ -0,0 +1,7 @@ +{ + "services": { + "count_omer": { + "service": "mdi:counter" + } + } +} diff --git a/homeassistant/components/jewish_calendar/manifest.json b/homeassistant/components/jewish_calendar/manifest.json index aca45320002..877c4cf9a99 100644 --- a/homeassistant/components/jewish_calendar/manifest.json +++ b/homeassistant/components/jewish_calendar/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/jewish_calendar", "iot_class": "calculated", "loggers": ["hdate"], - "requirements": ["hdate==0.11.1"], + "requirements": ["hdate[astral]==1.0.3"], "single_config_entry": true } diff --git a/homeassistant/components/jewish_calendar/sensor.py b/homeassistant/components/jewish_calendar/sensor.py index eee1d966ae6..7cb281b3af4 100644 --- a/homeassistant/components/jewish_calendar/sensor.py +++ b/homeassistant/components/jewish_calendar/sensor.py @@ -2,12 +2,13 @@ from __future__ import annotations -from datetime import date as Date +import datetime as dt import logging -from typing import Any, cast +from typing import Any -from hdate import HDate, HebrewDate, htables -from hdate.zmanim import Zmanim +from hdate import HDateInfo, Zmanim +from hdate.holidays import HolidayDatabase +from hdate.parasha import Parasha from homeassistant.components.sensor import ( SensorDeviceClass, @@ -59,83 +60,83 @@ INFO_SENSORS: tuple[SensorEntityDescription, ...] 
= ( TIME_SENSORS: tuple[SensorEntityDescription, ...] = ( SensorEntityDescription( - key="first_light", + key="alot_hashachar", name="Alot Hashachar", # codespell:ignore alot icon="mdi:weather-sunset-up", entity_registry_enabled_default=False, ), SensorEntityDescription( - key="talit", + key="talit_and_tefillin", name="Talit and Tefillin", icon="mdi:calendar-clock", entity_registry_enabled_default=False, ), SensorEntityDescription( - key="sunrise", + key="netz_hachama", name="Hanetz Hachama", icon="mdi:calendar-clock", ), SensorEntityDescription( - key="gra_end_shma", + key="sof_zman_shema_gra", name='Latest time for Shma Gr"a', icon="mdi:calendar-clock", entity_registry_enabled_default=False, ), SensorEntityDescription( - key="mga_end_shma", + key="sof_zman_shema_mga", name='Latest time for Shma MG"A', icon="mdi:calendar-clock", entity_registry_enabled_default=False, ), SensorEntityDescription( - key="gra_end_tfila", + key="sof_zman_tfilla_gra", name='Latest time for Tefilla Gr"a', icon="mdi:calendar-clock", entity_registry_enabled_default=False, ), SensorEntityDescription( - key="mga_end_tfila", + key="sof_zman_tfilla_mga", name='Latest time for Tefilla MG"A', icon="mdi:calendar-clock", entity_registry_enabled_default=False, ), SensorEntityDescription( - key="midday", + key="chatzot_hayom", name="Chatzot Hayom", icon="mdi:calendar-clock", entity_registry_enabled_default=False, ), SensorEntityDescription( - key="big_mincha", + key="mincha_gedola", name="Mincha Gedola", icon="mdi:calendar-clock", entity_registry_enabled_default=False, ), SensorEntityDescription( - key="small_mincha", + key="mincha_ketana", name="Mincha Ketana", icon="mdi:calendar-clock", entity_registry_enabled_default=False, ), SensorEntityDescription( - key="plag_mincha", + key="plag_hamincha", name="Plag Hamincha", icon="mdi:weather-sunset-down", entity_registry_enabled_default=False, ), SensorEntityDescription( - key="sunset", + key="shkia", name="Shkia", icon="mdi:weather-sunset", ), SensorEntityDescription( - key="first_stars", + key="tset_hakohavim_tsom", name="T'set Hakochavim", icon="mdi:weather-night", entity_registry_enabled_default=False, ), SensorEntityDescription( - key="three_stars", + key="tset_hakohavim_shabbat", name="T'set Hakochavim, 3 stars", icon="mdi:weather-night", entity_registry_enabled_default=False, @@ -212,7 +213,9 @@ class JewishCalendarSensor(JewishCalendarEntity, SensorEntity): _LOGGER.debug("Now: %s Sunset: %s", now, sunset) - daytime_date = HDate(today, diaspora=self._diaspora, hebrew=self._hebrew) + daytime_date = HDateInfo( + today, diaspora=self._diaspora, language=self._language + ) # The Jewish day starts after darkness (called "tzais") and finishes at # sunset ("shkia"). 
The time in between is a gray area @@ -238,14 +241,14 @@ class JewishCalendarSensor(JewishCalendarEntity, SensorEntity): "New value for %s: %s", self.entity_description.key, self._attr_native_value ) - def make_zmanim(self, date: Date) -> Zmanim: + def make_zmanim(self, date: dt.date) -> Zmanim: """Create a Zmanim object.""" return Zmanim( date=date, location=self._location, candle_lighting_offset=self._candle_lighting_offset, havdalah_offset=self._havdalah_offset, - hebrew=self._hebrew, + language=self._language, ) @property @@ -254,43 +257,40 @@ class JewishCalendarSensor(JewishCalendarEntity, SensorEntity): return self._attrs def get_state( - self, daytime_date: HDate, after_shkia_date: HDate, after_tzais_date: HDate + self, + daytime_date: HDateInfo, + after_shkia_date: HDateInfo, + after_tzais_date: HDateInfo, ) -> Any | None: """For a given type of sensor, return the state.""" # Terminology note: by convention in py-libhdate library, "upcoming" # refers to "current" or "upcoming" dates. if self.entity_description.key == "date": - hdate = cast(HebrewDate, after_shkia_date.hdate) - month = htables.MONTHS[hdate.month.value - 1] + hdate = after_shkia_date.hdate + hdate.month.set_language(self._language) self._attrs = { - "hebrew_year": hdate.year, - "hebrew_month_name": month.hebrew if self._hebrew else month.english, - "hebrew_day": hdate.day, + "hebrew_year": str(hdate.year), + "hebrew_month_name": str(hdate.month), + "hebrew_day": str(hdate.day), } - return after_shkia_date.hebrew_date + return after_shkia_date.hdate if self.entity_description.key == "weekly_portion": - self._attr_options = [ - (p.hebrew if self._hebrew else p.english) for p in htables.PARASHAOT - ] + self._attr_options = list(Parasha) # Compute the weekly portion based on the upcoming shabbat. 
return after_tzais_date.upcoming_shabbat.parasha if self.entity_description.key == "holiday": - _id = _type = _type_id = "" - _holiday_type = after_shkia_date.holiday_type - if isinstance(_holiday_type, list): - _id = ", ".join(after_shkia_date.holiday_name) - _type = ", ".join([_htype.name for _htype in _holiday_type]) - _type_id = ", ".join([str(_htype.value) for _htype in _holiday_type]) - else: - _id = after_shkia_date.holiday_name - _type = _holiday_type.name - _type_id = _holiday_type.value - self._attrs = {"id": _id, "type": _type, "type_id": _type_id} - self._attr_options = htables.get_all_holidays(self._language) - - return after_shkia_date.holiday_description + _holidays = after_shkia_date.holidays + _id = ", ".join(holiday.name for holiday in _holidays) + _type = ", ".join( + dict.fromkeys(_holiday.type.name for _holiday in _holidays) + ) + self._attrs = {"id": _id, "type": _type} + self._attr_options = HolidayDatabase(self._diaspora).get_all_names( + self._language + ) + return ", ".join(str(holiday) for holiday in _holidays) if _holidays else "" if self.entity_description.key == "omer_count": - return after_shkia_date.omer_day + return after_shkia_date.omer.total_days if after_shkia_date.omer else 0 if self.entity_description.key == "daf_yomi": return daytime_date.daf_yomi @@ -303,7 +303,10 @@ class JewishCalendarTimeSensor(JewishCalendarSensor): _attr_device_class = SensorDeviceClass.TIMESTAMP def get_state( - self, daytime_date: HDate, after_shkia_date: HDate, after_tzais_date: HDate + self, + daytime_date: HDateInfo, + after_shkia_date: HDateInfo, + after_tzais_date: HDateInfo, ) -> Any | None: """For a given type of sensor, return the state.""" if self.entity_description.key == "upcoming_shabbat_candle_lighting": @@ -325,5 +328,5 @@ class JewishCalendarTimeSensor(JewishCalendarSensor): ) return times.havdalah - times = self.make_zmanim(dt_util.now()).zmanim - return times[self.entity_description.key] + times = self.make_zmanim(dt_util.now().date()) + return times.zmanim[self.entity_description.key].local diff --git a/homeassistant/components/jewish_calendar/service.py b/homeassistant/components/jewish_calendar/service.py new file mode 100644 index 00000000000..7c3c7a21f1c --- /dev/null +++ b/homeassistant/components/jewish_calendar/service.py @@ -0,0 +1,63 @@ +"""Services for Jewish Calendar.""" + +import datetime +from typing import cast + +from hdate import HebrewDate +from hdate.omer import Nusach, Omer +from hdate.translator import Language +import voluptuous as vol + +from homeassistant.const import CONF_LANGUAGE +from homeassistant.core import ( + HomeAssistant, + ServiceCall, + ServiceResponse, + SupportsResponse, +) +from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.selector import LanguageSelector, LanguageSelectorConfig + +from .const import ATTR_DATE, ATTR_NUSACH, DOMAIN, SERVICE_COUNT_OMER + +SUPPORTED_LANGUAGES = {"en": "english", "fr": "french", "he": "hebrew"} +OMER_SCHEMA = vol.Schema( + { + vol.Required(ATTR_DATE, default=datetime.date.today): cv.date, + vol.Required(ATTR_NUSACH, default="sfarad"): vol.In( + [nusach.name.lower() for nusach in Nusach] + ), + vol.Required(CONF_LANGUAGE, default="he"): LanguageSelector( + LanguageSelectorConfig(languages=list(SUPPORTED_LANGUAGES.keys())) + ), + } +) + + +def async_setup_services(hass: HomeAssistant) -> None: + """Set up the Jewish Calendar services.""" + + async def get_omer_count(call: ServiceCall) -> ServiceResponse: + """Return the Omer blessing for a given date.""" + 
hebrew_date = HebrewDate.from_gdate(call.data["date"]) + nusach = Nusach[call.data["nusach"].upper()] + + # Currently Omer only supports Hebrew, English, and French and requires + # the full language name + language = cast(Language, SUPPORTED_LANGUAGES[call.data[CONF_LANGUAGE]]) + + omer = Omer(date=hebrew_date, nusach=nusach, language=language) + return { + "message": str(omer.count_str()), + "weeks": omer.week, + "days": omer.day, + "total_days": omer.total_days, + } + + hass.services.async_register( + DOMAIN, + SERVICE_COUNT_OMER, + get_omer_count, + schema=OMER_SCHEMA, + supports_response=SupportsResponse.ONLY, + ) diff --git a/homeassistant/components/jewish_calendar/services.yaml b/homeassistant/components/jewish_calendar/services.yaml new file mode 100644 index 00000000000..b0fa2cfef6c --- /dev/null +++ b/homeassistant/components/jewish_calendar/services.yaml @@ -0,0 +1,28 @@ +count_omer: + fields: + date: + required: true + example: "2025-04-14" + selector: + date: + nusach: + example: "sfarad" + default: "sfarad" + selector: + select: + translation_key: "nusach" + options: + - "sfarad" + - "ashkenaz" + - "adot_mizrah" + - "italian" + language: + required: true + default: "he" + example: "he" + selector: + language: + languages: + - "en" + - "he" + - "fr" diff --git a/homeassistant/components/jewish_calendar/strings.json b/homeassistant/components/jewish_calendar/strings.json index 1b7b86c0056..41e666b1e5d 100644 --- a/homeassistant/components/jewish_calendar/strings.json +++ b/homeassistant/components/jewish_calendar/strings.json @@ -45,5 +45,35 @@ } } } + }, + "selector": { + "nusach": { + "options": { + "sfarad": "Sfarad", + "ashkenaz": "Ashkenaz", + "adot_mizrah": "Adot Mizrah", + "italian": "Italian" + } + } + }, + "services": { + "count_omer": { + "name": "Count the Omer", + "description": "Returns the phrase for counting the Omer on a given date.", + "fields": { + "date": { + "name": "Date", + "description": "Date to count the Omer for." + }, + "nusach": { + "name": "Nusach", + "description": "Nusach to count the Omer in." + }, + "language": { + "name": "Language", + "description": "Language to count the Omer in." 
+ } + } + } } } diff --git a/homeassistant/components/kitchen_sink/__init__.py b/homeassistant/components/kitchen_sink/__init__.py index de8e521f0e8..2f876ca855d 100644 --- a/homeassistant/components/kitchen_sink/__init__.py +++ b/homeassistant/components/kitchen_sink/__init__.py @@ -12,14 +12,24 @@ from random import random import voluptuous as vol from homeassistant.components.recorder import DOMAIN as RECORDER_DOMAIN, get_instance -from homeassistant.components.recorder.models import StatisticData, StatisticMetaData +from homeassistant.components.recorder.models import ( + StatisticData, + StatisticMeanType, + StatisticMetaData, +) from homeassistant.components.recorder.statistics import ( async_add_external_statistics, async_import_statistics, get_last_statistics, ) from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry -from homeassistant.const import Platform, UnitOfEnergy, UnitOfTemperature, UnitOfVolume +from homeassistant.const import ( + DEGREE, + Platform, + UnitOfEnergy, + UnitOfTemperature, + UnitOfVolume, +) from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.helpers import config_validation as cv from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue @@ -72,6 +82,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set the config entry up.""" + if "recorder" in hass.config.components: + # Insert stats for mean_type_changed issue + await _insert_wrong_wind_direction_statistics(hass) + # Set up demo platforms with config entry await hass.config_entries.async_forward_entry_setups( entry, COMPONENTS_WITH_DEMO_PLATFORM @@ -233,7 +247,7 @@ async def _insert_statistics(hass: HomeAssistant) -> None: "name": "Outdoor temperature", "statistic_id": f"{DOMAIN}:temperature_outdoor", "unit_of_measurement": UnitOfTemperature.CELSIUS, - "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, } statistics = _generate_mean_statistics(yesterday_midnight, today_midnight, 15, 1) @@ -246,7 +260,7 @@ async def _insert_statistics(hass: HomeAssistant) -> None: "name": "Energy consumption 1", "statistic_id": f"{DOMAIN}:energy_consumption_kwh", "unit_of_measurement": UnitOfEnergy.KILO_WATT_HOUR, - "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, } await _insert_sum_statistics(hass, metadata, yesterday_midnight, today_midnight, 1) @@ -258,7 +272,7 @@ async def _insert_statistics(hass: HomeAssistant) -> None: "name": "Energy consumption 2", "statistic_id": f"{DOMAIN}:energy_consumption_mwh", "unit_of_measurement": UnitOfEnergy.MEGA_WATT_HOUR, - "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, } await _insert_sum_statistics( @@ -272,7 +286,7 @@ async def _insert_statistics(hass: HomeAssistant) -> None: "name": "Gas consumption 1", "statistic_id": f"{DOMAIN}:gas_consumption_m3", "unit_of_measurement": UnitOfVolume.CUBIC_METERS, - "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, } await _insert_sum_statistics( @@ -286,7 +300,7 @@ async def _insert_statistics(hass: HomeAssistant) -> None: "name": "Gas consumption 2", "statistic_id": f"{DOMAIN}:gas_consumption_ft3", "unit_of_measurement": UnitOfVolume.CUBIC_FEET, - "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, } await _insert_sum_statistics(hass, metadata, yesterday_midnight, today_midnight, 15) @@ -298,7 +312,7 @@ async def _insert_statistics(hass: 
HomeAssistant) -> None: "name": None, "statistic_id": "sensor.statistics_issues_issue_1", "unit_of_measurement": UnitOfVolume.CUBIC_METERS, - "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, } statistics = _generate_mean_statistics(yesterday_midnight, today_midnight, 15, 1) @@ -310,7 +324,7 @@ async def _insert_statistics(hass: HomeAssistant) -> None: "name": None, "statistic_id": "sensor.statistics_issues_issue_2", "unit_of_measurement": "cats", - "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, } statistics = _generate_mean_statistics(yesterday_midnight, today_midnight, 15, 1) @@ -322,7 +336,7 @@ async def _insert_statistics(hass: HomeAssistant) -> None: "name": None, "statistic_id": "sensor.statistics_issues_issue_3", "unit_of_measurement": UnitOfVolume.CUBIC_METERS, - "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, } statistics = _generate_mean_statistics(yesterday_midnight, today_midnight, 15, 1) @@ -334,8 +348,28 @@ async def _insert_statistics(hass: HomeAssistant) -> None: "name": None, "statistic_id": "sensor.statistics_issues_issue_4", "unit_of_measurement": UnitOfVolume.CUBIC_METERS, - "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, } statistics = _generate_mean_statistics(yesterday_midnight, today_midnight, 15, 1) async_import_statistics(hass, metadata, statistics) + + +async def _insert_wrong_wind_direction_statistics(hass: HomeAssistant) -> None: + """Insert some fake wind direction statistics.""" + now = dt_util.now() + yesterday = now - datetime.timedelta(days=1) + yesterday_midnight = yesterday.replace(hour=0, minute=0, second=0, microsecond=0) + today_midnight = yesterday_midnight + datetime.timedelta(days=1) + + # Add some statistics required to raise the mean_type_changed issue later + metadata: StatisticMetaData = { + "source": RECORDER_DOMAIN, + "name": None, + "statistic_id": "sensor.statistics_issues_issue_5", + "unit_of_measurement": DEGREE, + "mean_type": StatisticMeanType.ARITHMETIC, + "has_sum": False, + } + statistics = _generate_mean_statistics(yesterday_midnight, today_midnight, 0, 360) + async_import_statistics(hass, metadata, statistics) diff --git a/homeassistant/components/kitchen_sink/config_flow.py b/homeassistant/components/kitchen_sink/config_flow.py index e1ffe334038..aa722d27944 100644 --- a/homeassistant/components/kitchen_sink/config_flow.py +++ b/homeassistant/components/kitchen_sink/config_flow.py @@ -17,6 +17,7 @@ from homeassistant.config_entries import ( SubentryFlowResult, ) from homeassistant.core import callback +from homeassistant.helpers import config_validation as cv from . 
import DOMAIN @@ -80,30 +81,30 @@ class OptionsFlowHandler(OptionsFlow): if user_input is not None: return self.async_create_entry(data=self.config_entry.options | user_input) - return self.async_show_form( - step_id="options_1", - data_schema=vol.Schema( - { - vol.Required("section_1"): data_entry_flow.section( - vol.Schema( - { - vol.Optional( - CONF_BOOLEAN, - default=self.config_entry.options.get( - CONF_BOOLEAN, False - ), - ): bool, - vol.Optional( - CONF_INT, - default=self.config_entry.options.get(CONF_INT, 10), - ): int, - } - ), - {"collapsed": False}, + data_schema = vol.Schema( + { + vol.Required("section_1"): data_entry_flow.section( + vol.Schema( + { + vol.Optional( + CONF_BOOLEAN, + default=self.config_entry.options.get( + CONF_BOOLEAN, False + ), + ): bool, + vol.Optional(CONF_INT): cv.positive_int, + } ), - } - ), + {"collapsed": False}, + ), + } ) + self.add_suggested_values_to_schema( + data_schema, + {"section_1": {"int": self.config_entry.options.get(CONF_INT, 10)}}, + ) + + return self.async_show_form(step_id="options_1", data_schema=data_schema) class SubentryFlowHandler(ConfigSubentryFlow): @@ -146,7 +147,7 @@ class SubentryFlowHandler(ConfigSubentryFlow): if user_input is not None: title = user_input.pop("name") return self.async_update_and_abort( - self._get_reconfigure_entry(), + self._get_entry(), self._get_reconfigure_subentry(), data=user_input, title=title, diff --git a/homeassistant/components/kitchen_sink/sensor.py b/homeassistant/components/kitchen_sink/sensor.py index 19d1b31aeab..04cb833f0df 100644 --- a/homeassistant/components/kitchen_sink/sensor.py +++ b/homeassistant/components/kitchen_sink/sensor.py @@ -8,7 +8,7 @@ from homeassistant.components.sensor import ( SensorStateClass, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import UnitOfPower +from homeassistant.const import DEGREE, UnitOfPower from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback @@ -87,6 +87,16 @@ async def async_setup_entry( state_class=None, unit_of_measurement=UnitOfPower.WATT, ), + DemoSensor( + device_unique_id="statistics_issues", + unique_id="statistics_issue_5", + device_name="Statistics issues", + entity_name="Issue 5", + state=100, + device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, + unit_of_measurement=DEGREE, + ), ] ) diff --git a/homeassistant/components/knx/__init__.py b/homeassistant/components/knx/__init__.py index fa3439b02f4..8ad16642e45 100644 --- a/homeassistant/components/knx/__init__.py +++ b/homeassistant/components/knx/__init__.py @@ -486,7 +486,7 @@ class KNXModule: transcoder := DPTBase.parse_transcoder(dpt) ): self._address_filter_transcoder.update( - {_filter: transcoder for _filter in _filters} + dict.fromkeys(_filters, transcoder) ) return self.xknx.telegram_queue.register_telegram_received_cb( diff --git a/homeassistant/components/knx/manifest.json b/homeassistant/components/knx/manifest.json index 8cfb034a793..bde6dfa226f 100644 --- a/homeassistant/components/knx/manifest.json +++ b/homeassistant/components/knx/manifest.json @@ -11,8 +11,8 @@ "loggers": ["xknx", "xknxproject"], "requirements": [ "xknx==3.6.0", - "xknxproject==3.8.1", - "knx-frontend==2025.1.30.194235" + "xknxproject==3.8.2", + "knx-frontend==2025.3.8.214559" ], "single_config_entry": true } diff --git a/homeassistant/components/knx/services.py 
b/homeassistant/components/knx/services.py index f0f760180f4..fc28e0850ed 100644 --- a/homeassistant/components/knx/services.py +++ b/homeassistant/components/knx/services.py @@ -126,7 +126,7 @@ async def service_event_register_modify(call: ServiceCall) -> None: transcoder := DPTBase.parse_transcoder(dpt) ): knx_module.group_address_transcoder.update( - {_address: transcoder for _address in group_addresses} + dict.fromkeys(group_addresses, transcoder) ) for group_address in group_addresses: if group_address in knx_module.knx_event_callback.group_addresses: diff --git a/homeassistant/components/knx/storage/entity_store_schema.py b/homeassistant/components/knx/storage/entity_store_schema.py index d99ffa86f52..cde18a181ec 100644 --- a/homeassistant/components/knx/storage/entity_store_schema.py +++ b/homeassistant/components/knx/storage/entity_store_schema.py @@ -114,7 +114,7 @@ BINARY_SENSOR_SCHEMA = vol.Schema( ), vol.Optional(CONF_RESET_AFTER): selector.NumberSelector( selector.NumberSelectorConfig( - min=0, max=10, step=0.1, unit_of_measurement="s" + min=0, max=600, step=0.1, unit_of_measurement="s" ) ), }, diff --git a/homeassistant/components/lacrosse_view/sensor.py b/homeassistant/components/lacrosse_view/sensor.py index 667fcbb8dcc..dde8dfd54a2 100644 --- a/homeassistant/components/lacrosse_view/sensor.py +++ b/homeassistant/components/lacrosse_view/sensor.py @@ -106,6 +106,7 @@ SENSOR_DESCRIPTIONS = { native_unit_of_measurement=DEGREE, suggested_display_precision=2, device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, ), "WetDry": LaCrosseSensorEntityDescription( key="WetDry", diff --git a/homeassistant/components/lamarzocco/__init__.py b/homeassistant/components/lamarzocco/__init__.py index d20616e1940..25c8fd1091e 100644 --- a/homeassistant/components/lamarzocco/__init__.py +++ b/homeassistant/components/lamarzocco/__init__.py @@ -61,6 +61,42 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) - client=client, ) + # initialize the firmware update coordinator early to check the firmware version + firmware_device = LaMarzoccoMachine( + model=entry.data[CONF_MODEL], + serial_number=entry.unique_id, + name=entry.data[CONF_NAME], + cloud_client=cloud_client, + ) + + firmware_coordinator = LaMarzoccoFirmwareUpdateCoordinator( + hass, entry, firmware_device + ) + await firmware_coordinator.async_config_entry_first_refresh() + gateway_version = version.parse( + firmware_device.firmware[FirmwareType.GATEWAY].current_version + ) + + if gateway_version >= version.parse("v5.0.9"): + # remove host from config entry, it is not supported anymore + data = {k: v for k, v in entry.data.items() if k != CONF_HOST} + hass.config_entries.async_update_entry( + entry, + data=data, + ) + + elif gateway_version < version.parse("v3.4-rc5"): + # incompatible gateway firmware, create an issue + ir.async_create_issue( + hass, + DOMAIN, + "unsupported_gateway_firmware", + is_fixable=False, + severity=ir.IssueSeverity.ERROR, + translation_key="unsupported_gateway_firmware", + translation_placeholders={"gateway_version": str(gateway_version)}, + ) + # initialize local API local_client: LaMarzoccoLocalClient | None = None if (host := entry.data.get(CONF_HOST)) is not None: @@ -117,30 +153,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) - coordinators = LaMarzoccoRuntimeData( LaMarzoccoConfigUpdateCoordinator(hass, entry, device, local_client), - LaMarzoccoFirmwareUpdateCoordinator(hass, entry, device), + 
firmware_coordinator, LaMarzoccoStatisticsUpdateCoordinator(hass, entry, device), ) # API does not like concurrent requests, so no asyncio.gather here await coordinators.config_coordinator.async_config_entry_first_refresh() - await coordinators.firmware_coordinator.async_config_entry_first_refresh() await coordinators.statistics_coordinator.async_config_entry_first_refresh() entry.runtime_data = coordinators - gateway_version = device.firmware[FirmwareType.GATEWAY].current_version - if version.parse(gateway_version) < version.parse("v3.4-rc5"): - # incompatible gateway firmware, create an issue - ir.async_create_issue( - hass, - DOMAIN, - "unsupported_gateway_firmware", - is_fixable=False, - severity=ir.IssueSeverity.ERROR, - translation_key="unsupported_gateway_firmware", - translation_placeholders={"gateway_version": gateway_version}, - ) - await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) async def update_listener( diff --git a/homeassistant/components/lamarzocco/manifest.json b/homeassistant/components/lamarzocco/manifest.json index eceb2bbf53b..73f00b2bdd0 100644 --- a/homeassistant/components/lamarzocco/manifest.json +++ b/homeassistant/components/lamarzocco/manifest.json @@ -37,5 +37,5 @@ "iot_class": "cloud_polling", "loggers": ["pylamarzocco"], "quality_scale": "platinum", - "requirements": ["pylamarzocco==1.4.7"] + "requirements": ["pylamarzocco==1.4.9"] } diff --git a/homeassistant/components/lamarzocco/number.py b/homeassistant/components/lamarzocco/number.py index 666c57c1866..08e9ad7e590 100644 --- a/homeassistant/components/lamarzocco/number.py +++ b/homeassistant/components/lamarzocco/number.py @@ -144,9 +144,12 @@ KEY_ENTITIES: tuple[LaMarzoccoKeyNumberEntityDescription, ...] = ( set_value_fn=lambda machine, value, key: machine.set_prebrew_time( prebrew_off_time=value, key=key ), - native_value_fn=lambda config, key: config.prebrew_configuration[key].off_time, + native_value_fn=lambda config, key: config.prebrew_configuration[key][ + 0 + ].off_time, available_fn=lambda device: len(device.config.prebrew_configuration) > 0 - and device.config.prebrew_mode == PrebrewMode.PREBREW, + and device.config.prebrew_mode + in (PrebrewMode.PREBREW, PrebrewMode.PREBREW_ENABLED), supported_fn=lambda coordinator: coordinator.device.model != MachineModel.GS3_MP, ), @@ -162,9 +165,12 @@ KEY_ENTITIES: tuple[LaMarzoccoKeyNumberEntityDescription, ...] = ( set_value_fn=lambda machine, value, key: machine.set_prebrew_time( prebrew_on_time=value, key=key ), - native_value_fn=lambda config, key: config.prebrew_configuration[key].off_time, + native_value_fn=lambda config, key: config.prebrew_configuration[key][ + 0 + ].off_time, available_fn=lambda device: len(device.config.prebrew_configuration) > 0 - and device.config.prebrew_mode == PrebrewMode.PREBREW, + and device.config.prebrew_mode + in (PrebrewMode.PREBREW, PrebrewMode.PREBREW_ENABLED), supported_fn=lambda coordinator: coordinator.device.model != MachineModel.GS3_MP, ), @@ -180,8 +186,8 @@ KEY_ENTITIES: tuple[LaMarzoccoKeyNumberEntityDescription, ...] 
= ( set_value_fn=lambda machine, value, key: machine.set_preinfusion_time( preinfusion_time=value, key=key ), - native_value_fn=lambda config, key: config.prebrew_configuration[ - key + native_value_fn=lambda config, key: config.prebrew_configuration[key][ + 1 ].preinfusion_time, available_fn=lambda device: len(device.config.prebrew_configuration) > 0 and device.config.prebrew_mode == PrebrewMode.PREINFUSION, diff --git a/homeassistant/components/lamarzocco/select.py b/homeassistant/components/lamarzocco/select.py index d8217cefaff..5ebe2d7b9da 100644 --- a/homeassistant/components/lamarzocco/select.py +++ b/homeassistant/components/lamarzocco/select.py @@ -38,6 +38,7 @@ STEAM_LEVEL_LM_TO_HA = {value: key for key, value in STEAM_LEVEL_HA_TO_LM.items( PREBREW_MODE_HA_TO_LM = { "disabled": PrebrewMode.DISABLED, "prebrew": PrebrewMode.PREBREW, + "prebrew_enabled": PrebrewMode.PREBREW_ENABLED, "preinfusion": PrebrewMode.PREINFUSION, } diff --git a/homeassistant/components/lamarzocco/strings.json b/homeassistant/components/lamarzocco/strings.json index 62050685c27..04853b8d0ca 100644 --- a/homeassistant/components/lamarzocco/strings.json +++ b/homeassistant/components/lamarzocco/strings.json @@ -148,6 +148,7 @@ "state": { "disabled": "Disabled", "prebrew": "Prebrew", + "prebrew_enabled": "Prebrew", "preinfusion": "Preinfusion" } }, diff --git a/homeassistant/components/lastfm/config_flow.py b/homeassistant/components/lastfm/config_flow.py index 0e1f680dd63..ca40aebd0d4 100644 --- a/homeassistant/components/lastfm/config_flow.py +++ b/homeassistant/components/lastfm/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +import logging from typing import Any from pylast import LastFMNetwork, PyLastError, User, WSError @@ -32,6 +33,8 @@ CONFIG_SCHEMA: vol.Schema = vol.Schema( } ) +_LOGGER = logging.getLogger(__name__) + def get_lastfm_user(api_key: str, username: str) -> tuple[User, dict[str, str]]: """Get and validate lastFM User.""" @@ -49,7 +52,8 @@ def get_lastfm_user(api_key: str, username: str) -> tuple[User, dict[str, str]]: errors["base"] = "invalid_auth" else: errors["base"] = "unknown" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" return user, errors diff --git a/homeassistant/components/lcn/strings.json b/homeassistant/components/lcn/strings.json index 0bdd85a3678..0a8112d997a 100644 --- a/homeassistant/components/lcn/strings.json +++ b/homeassistant/components/lcn/strings.json @@ -396,19 +396,19 @@ }, "address_to_device_id": { "name": "Address to device ID", - "description": "Convert LCN address to device ID.", + "description": "Converts an LCN address into a device ID.", "fields": { "id": { "name": "Module or group ID", - "description": "Target module or group ID." + "description": "Module or group number of the target." }, "segment_id": { "name": "Segment ID", - "description": "Target segment ID." + "description": "Segment number of the target." }, "type": { "name": "Type", - "description": "Target type." + "description": "Module type of the target." 
}, "host": { "name": "Host name", diff --git a/homeassistant/components/ld2410_ble/manifest.json b/homeassistant/components/ld2410_ble/manifest.json index c92bcb3294f..1896f2109a7 100644 --- a/homeassistant/components/ld2410_ble/manifest.json +++ b/homeassistant/components/ld2410_ble/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/ld2410_ble", "integration_type": "device", "iot_class": "local_push", - "requirements": ["bluetooth-data-tools==1.25.0", "ld2410-ble==0.1.1"] + "requirements": ["bluetooth-data-tools==1.26.1", "ld2410-ble==0.1.1"] } diff --git a/homeassistant/components/led_ble/light.py b/homeassistant/components/led_ble/light.py index 14f2f228e13..2facda734d5 100644 --- a/homeassistant/components/led_ble/light.py +++ b/homeassistant/components/led_ble/light.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import Any +from typing import Any, cast from led_ble import LEDBLE @@ -83,7 +83,7 @@ class LEDBLEEntity(CoordinatorEntity[DataUpdateCoordinator[None]], LightEntity): async def async_turn_on(self, **kwargs: Any) -> None: """Instruct the light to turn on.""" - brightness = kwargs.get(ATTR_BRIGHTNESS, self.brightness) + brightness = cast(int, kwargs.get(ATTR_BRIGHTNESS, self.brightness)) if effect := kwargs.get(ATTR_EFFECT): await self._async_set_effect(effect, brightness) return diff --git a/homeassistant/components/led_ble/manifest.json b/homeassistant/components/led_ble/manifest.json index 8f624a3c225..270495c8770 100644 --- a/homeassistant/components/led_ble/manifest.json +++ b/homeassistant/components/led_ble/manifest.json @@ -35,5 +35,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/led_ble", "iot_class": "local_polling", - "requirements": ["bluetooth-data-tools==1.25.0", "led-ble==1.1.6"] + "requirements": ["bluetooth-data-tools==1.26.1", "led-ble==1.1.6"] } diff --git a/homeassistant/components/lektrico/strings.json b/homeassistant/components/lektrico/strings.json index 3b4417c346a..eb0203e0661 100644 --- a/homeassistant/components/lektrico/strings.json +++ b/homeassistant/components/lektrico/strings.json @@ -24,7 +24,7 @@ "entity": { "binary_sensor": { "state_e_activated": { - "name": "Ev error" + "name": "EV error" }, "overtemp": { "name": "Thermal throttling" @@ -45,10 +45,10 @@ "name": "Overvoltage" }, "rcd_error": { - "name": "Rcd error" + "name": "RCD error" }, "cp_diode_failure": { - "name": "Ev diode short" + "name": "EV diode short" }, "contactor_failure": { "name": "Relay contacts welded" @@ -64,7 +64,7 @@ }, "number": { "led_max_brightness": { - "name": "Led brightness" + "name": "LED brightness" }, "dynamic_limit": { "name": "Dynamic limit" diff --git a/homeassistant/components/lg_thinq/manifest.json b/homeassistant/components/lg_thinq/manifest.json index b00d28c1d4f..cffc61cb1c4 100644 --- a/homeassistant/components/lg_thinq/manifest.json +++ b/homeassistant/components/lg_thinq/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/lg_thinq", "iot_class": "cloud_push", "loggers": ["thinqconnect"], - "requirements": ["thinqconnect==1.0.4"] + "requirements": ["thinqconnect==1.0.5"] } diff --git a/homeassistant/components/lifx/strings.json b/homeassistant/components/lifx/strings.json index c407489d52d..be0485c6dff 100644 --- a/homeassistant/components/lifx/strings.json +++ b/homeassistant/components/lifx/strings.json @@ -201,7 +201,7 @@ }, "effect_morph": { "name": "Morph effect", - "description": "Starts the 
firmware-based Morph effect on LIFX Tiles on Candle.", + "description": "Starts the firmware-based Morph effect on LIFX Tiles or Candle.", "fields": { "speed": { "name": "Speed", @@ -223,23 +223,23 @@ }, "effect_sky": { "name": "Sky effect", - "description": "Starts the firmware-based Sky effect on LIFX Ceiling.", + "description": "Starts a firmware-based effect on LIFX Ceiling lights that animates a sky scene across the device.", "fields": { "speed": { "name": "Speed", - "description": "How long the Sunrise and Sunset sky types will take to complete. For the Cloud sky type, it is the speed of the clouds across the device." + "description": "How long the Sunrise and Sunset sky types will take to complete. For the Clouds sky type, it is the speed of the clouds across the device." }, "sky_type": { "name": "Sky type", "description": "The style of sky that will be animated by the effect." }, "cloud_saturation_min": { - "name": "Cloud saturation Minimum", - "description": "Minimum cloud saturation." + "name": "Cloud saturation minimum", + "description": "The minimum cloud saturation for the Clouds sky type." }, "cloud_saturation_max": { - "name": "Cloud Saturation maximum", - "description": "Maximum cloud saturation." + "name": "Cloud saturation maximum", + "description": "The maximum cloud saturation for the Clouds sky type." }, "palette": { "name": "Palette", diff --git a/homeassistant/components/light/__init__.py b/homeassistant/components/light/__init__.py index 637ba45c7d9..7b548533058 100644 --- a/homeassistant/components/light/__init__.py +++ b/homeassistant/components/light/__init__.py @@ -465,7 +465,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: ): params.pop(_DEPRECATED_ATTR_COLOR_TEMP.value) color_temp = params.pop(ATTR_COLOR_TEMP_KELVIN) - brightness = params.get(ATTR_BRIGHTNESS, light.brightness) + brightness = cast(int, params.get(ATTR_BRIGHTNESS, light.brightness)) params[ATTR_RGBWW_COLOR] = color_util.color_temperature_to_rgbww( color_temp, brightness, diff --git a/homeassistant/components/linkplay/entity.py b/homeassistant/components/linkplay/entity.py index 74e067f5eb3..0bfb34af42c 100644 --- a/homeassistant/components/linkplay/entity.py +++ b/homeassistant/components/linkplay/entity.py @@ -4,13 +4,13 @@ from collections.abc import Callable, Coroutine from typing import Any, Concatenate from linkplay.bridge import LinkPlayBridge +from linkplay.manufacturers import MANUFACTURER_GENERIC, get_info_from_project from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr from homeassistant.helpers.entity import Entity from . 
import DOMAIN, LinkPlayRequestException -from .utils import MANUFACTURER_GENERIC, get_info_from_project def exception_wrap[_LinkPlayEntityT: LinkPlayBaseEntity, **_P, _R]( diff --git a/homeassistant/components/linkplay/manifest.json b/homeassistant/components/linkplay/manifest.json index ec9a8759a30..0941f2fbe61 100644 --- a/homeassistant/components/linkplay/manifest.json +++ b/homeassistant/components/linkplay/manifest.json @@ -7,6 +7,6 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["linkplay"], - "requirements": ["python-linkplay==0.1.3"], + "requirements": ["python-linkplay==0.2.1"], "zeroconf": ["_linkplay._tcp.local."] } diff --git a/homeassistant/components/linkplay/media_player.py b/homeassistant/components/linkplay/media_player.py index b27616f1e09..16b0d5f75f1 100644 --- a/homeassistant/components/linkplay/media_player.py +++ b/homeassistant/components/linkplay/media_player.py @@ -86,16 +86,10 @@ REPEAT_MAP: dict[LoopMode, RepeatMode] = { REPEAT_MAP_INV: dict[RepeatMode, LoopMode] = {v: k for k, v in REPEAT_MAP.items()} -EQUALIZER_MAP: dict[EqualizerMode, str] = { - EqualizerMode.NONE: "None", - EqualizerMode.CLASSIC: "Classic", - EqualizerMode.POP: "Pop", - EqualizerMode.JAZZ: "Jazz", - EqualizerMode.VOCAL: "Vocal", +EQUALIZER_MAP_INV: dict[str, EqualizerMode] = { + mode.value: mode for mode in EqualizerMode } -EQUALIZER_MAP_INV: dict[str, EqualizerMode] = {v: k for k, v in EQUALIZER_MAP.items()} - DEFAULT_FEATURES: MediaPlayerEntityFeature = ( MediaPlayerEntityFeature.PLAY | MediaPlayerEntityFeature.PLAY_MEDIA @@ -148,7 +142,6 @@ async def async_setup_entry( class LinkPlayMediaPlayerEntity(LinkPlayBaseEntity, MediaPlayerEntity): """Representation of a LinkPlay media player.""" - _attr_sound_mode_list = list(EQUALIZER_MAP.values()) _attr_device_class = MediaPlayerDeviceClass.RECEIVER _attr_media_content_type = MediaType.MUSIC _attr_name = None @@ -163,6 +156,9 @@ class LinkPlayMediaPlayerEntity(LinkPlayBaseEntity, MediaPlayerEntity): self._attr_source_list = [ SOURCE_MAP[playing_mode] for playing_mode in bridge.device.playmode_support ] + self._attr_sound_mode_list = [ + mode.value for mode in bridge.player.available_equalizer_modes + ] @exception_wrap async def async_update(self) -> None: @@ -348,7 +344,7 @@ class LinkPlayMediaPlayerEntity(LinkPlayBaseEntity, MediaPlayerEntity): self._attr_is_volume_muted = self._bridge.player.muted self._attr_repeat = REPEAT_MAP[self._bridge.player.loop_mode] self._attr_shuffle = self._bridge.player.loop_mode == LoopMode.RANDOM_PLAYBACK - self._attr_sound_mode = EQUALIZER_MAP[self._bridge.player.equalizer_mode] + self._attr_sound_mode = self._bridge.player.equalizer_mode.value self._attr_supported_features = DEFAULT_FEATURES if self._bridge.player.status == PlayingStatus.PLAYING: diff --git a/homeassistant/components/linkplay/utils.py b/homeassistant/components/linkplay/utils.py index 7151ed1537a..63d04a3afc4 100644 --- a/homeassistant/components/linkplay/utils.py +++ b/homeassistant/components/linkplay/utils.py @@ -1,7 +1,5 @@ """Utilities for the LinkPlay component.""" -from typing import Final - from aiohttp import ClientSession from linkplay.utils import async_create_unverified_client_session @@ -10,75 +8,6 @@ from homeassistant.core import Event, HomeAssistant, callback from .const import DATA_SESSION, DOMAIN -MANUFACTURER_ARTSOUND: Final[str] = "ArtSound" -MANUFACTURER_ARYLIC: Final[str] = "Arylic" -MANUFACTURER_IEAST: Final[str] = "iEAST" -MANUFACTURER_WIIM: Final[str] = "WiiM" -MANUFACTURER_GGMM: Final[str] = 
"GGMM" -MANUFACTURER_MEDION: Final[str] = "Medion" -MANUFACTURER_GENERIC: Final[str] = "Generic" -MODELS_ARTSOUND_SMART_ZONE4: Final[str] = "Smart Zone 4 AMP" -MODELS_ARTSOUND_SMART_HYDE: Final[str] = "Smart Hyde" -MODELS_ARYLIC_S50: Final[str] = "S50+" -MODELS_ARYLIC_S50_PRO: Final[str] = "S50 Pro" -MODELS_ARYLIC_A30: Final[str] = "A30" -MODELS_ARYLIC_A50: Final[str] = "A50" -MODELS_ARYLIC_A50S: Final[str] = "A50+" -MODELS_ARYLIC_UP2STREAM_AMP: Final[str] = "Up2Stream Amp 2.0" -MODELS_ARYLIC_UP2STREAM_AMP_2P1: Final[str] = "Up2Stream Amp 2.1" -MODELS_ARYLIC_UP2STREAM_AMP_V3: Final[str] = "Up2Stream Amp v3" -MODELS_ARYLIC_UP2STREAM_AMP_V4: Final[str] = "Up2Stream Amp v4" -MODELS_ARYLIC_UP2STREAM_PRO: Final[str] = "Up2Stream Pro v1" -MODELS_ARYLIC_UP2STREAM_PRO_V3: Final[str] = "Up2Stream Pro v3" -MODELS_ARYLIC_S10P: Final[str] = "Arylic S10+" -MODELS_ARYLIC_UP2STREAM_PLATE_AMP: Final[str] = "Up2Stream Plate Amp" -MODELS_IEAST_AUDIOCAST_M5: Final[str] = "AudioCast M5" -MODELS_WIIM_AMP: Final[str] = "WiiM Amp" -MODELS_WIIM_MINI: Final[str] = "WiiM Mini" -MODELS_GGMM_GGMM_E2: Final[str] = "GGMM E2" -MODELS_MEDION_MD_43970: Final[str] = "Life P66970 (MD 43970)" -MODELS_GENERIC: Final[str] = "Generic" - -PROJECTID_LOOKUP: Final[dict[str, tuple[str, str]]] = { - "SMART_ZONE4_AMP": (MANUFACTURER_ARTSOUND, MODELS_ARTSOUND_SMART_ZONE4), - "SMART_HYDE": (MANUFACTURER_ARTSOUND, MODELS_ARTSOUND_SMART_HYDE), - "ARYLIC_S50": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_S50), - "RP0016_S50PRO_S": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_S50_PRO), - "RP0011_WB60_S": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_A30), - "X-50": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_A50), - "ARYLIC_A50S": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_A50S), - "RP0011_WB60": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_AMP), - "UP2STREAM_AMP_V3": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_AMP_V3), - "UP2STREAM_AMP_V4": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_AMP_V4), - "UP2STREAM_PRO_V3": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_PRO_V3), - "S10P_WIFI": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_S10P), - "ARYLIC_V20": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_PLATE_AMP), - "UP2STREAM_MINI_V3": (MANUFACTURER_ARYLIC, MODELS_GENERIC), - "UP2STREAM_AMP_2P1": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_AMP_2P1), - "RP0014_A50C_S": (MANUFACTURER_ARYLIC, MODELS_GENERIC), - "ARYLIC_A30": (MANUFACTURER_ARYLIC, MODELS_GENERIC), - "ARYLIC_SUBWOOFER": (MANUFACTURER_ARYLIC, MODELS_GENERIC), - "ARYLIC_S50A": (MANUFACTURER_ARYLIC, MODELS_GENERIC), - "RP0010_D5_S": (MANUFACTURER_ARYLIC, MODELS_GENERIC), - "RP0001": (MANUFACTURER_ARYLIC, MODELS_GENERIC), - "RP0013_WA31S": (MANUFACTURER_ARYLIC, MODELS_GENERIC), - "RP0010_D5": (MANUFACTURER_ARYLIC, MODELS_GENERIC), - "RP0013_WA31S_S": (MANUFACTURER_ARYLIC, MODELS_GENERIC), - "RP0014_A50D_S": (MANUFACTURER_ARYLIC, MODELS_GENERIC), - "ARYLIC_A50TE": (MANUFACTURER_ARYLIC, MODELS_GENERIC), - "ARYLIC_A50N": (MANUFACTURER_ARYLIC, MODELS_GENERIC), - "iEAST-02": (MANUFACTURER_IEAST, MODELS_IEAST_AUDIOCAST_M5), - "WiiM_Amp_4layer": (MANUFACTURER_WIIM, MODELS_WIIM_AMP), - "Muzo_Mini": (MANUFACTURER_WIIM, MODELS_WIIM_MINI), - "GGMM_E2A": (MANUFACTURER_GGMM, MODELS_GGMM_GGMM_E2), - "A16": (MANUFACTURER_MEDION, MODELS_MEDION_MD_43970), -} - - -def get_info_from_project(project: str) -> tuple[str, str]: - """Get manufacturer and model info based on given project.""" - return PROJECTID_LOOKUP.get(project, (MANUFACTURER_GENERIC, MODELS_GENERIC)) - async def async_get_client_session(hass: HomeAssistant) -> ClientSession: """Get a 
ClientSession that can be used with LinkPlay devices.""" diff --git a/homeassistant/components/litterrobot/strings.json b/homeassistant/components/litterrobot/strings.json index 19b007de068..052427f3032 100644 --- a/homeassistant/components/litterrobot/strings.json +++ b/homeassistant/components/litterrobot/strings.json @@ -77,31 +77,31 @@ "status_code": { "name": "Status code", "state": { - "br": "Bonnet Removed", - "ccc": "Clean Cycle Complete", - "ccp": "Clean Cycle In Progress", - "cd": "Cat Detected", - "csf": "Cat Sensor Fault", - "csi": "Cat Sensor Interrupted", - "cst": "Cat Sensor Timing", - "df1": "Drawer Almost Full - 2 Cycles Left", - "df2": "Drawer Almost Full - 1 Cycle Left", - "dfs": "Drawer Full", - "dhf": "Dump + Home Position Fault", - "dpf": "Dump Position Fault", - "ec": "Empty Cycle", - "hpf": "Home Position Fault", + "br": "Bonnet removed", + "ccc": "Clean cycle complete", + "ccp": "Clean cycle in progress", + "cd": "Cat detected", + "csf": "Cat sensor fault", + "csi": "Cat sensor interrupted", + "cst": "Cat sensor timing", + "df1": "Drawer almost full - 2 cycles left", + "df2": "Drawer almost full - 1 cycle left", + "dfs": "Drawer full", + "dhf": "Dump + home position fault", + "dpf": "Dump position fault", + "ec": "Empty cycle", + "hpf": "Home position fault", "off": "[%key:common::state::off%]", "offline": "Offline", - "otf": "Over Torque Fault", + "otf": "Over torque fault", "p": "[%key:common::state::paused%]", - "pd": "Pinch Detect", - "pwrd": "Powering Down", - "pwru": "Powering Up", + "pd": "Pinch detect", + "pwrd": "Powering down", + "pwru": "Powering up", "rdy": "Ready", - "scf": "Cat Sensor Fault At Startup", - "sdf": "Drawer Full At Startup", - "spf": "Pinch Detect At Startup" + "scf": "Cat sensor fault at startup", + "sdf": "Drawer full at startup", + "spf": "Pinch detect at startup" } }, "waste_drawer": { diff --git a/homeassistant/components/local_calendar/manifest.json b/homeassistant/components/local_calendar/manifest.json index 21a4134a8b6..fc6d0bc00c7 100644 --- a/homeassistant/components/local_calendar/manifest.json +++ b/homeassistant/components/local_calendar/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/local_calendar", "iot_class": "local_polling", "loggers": ["ical"], - "requirements": ["ical==8.3.0"] + "requirements": ["ical==9.0.1"] } diff --git a/homeassistant/components/local_todo/manifest.json b/homeassistant/components/local_todo/manifest.json index 68154f10885..27d3ccce4a7 100644 --- a/homeassistant/components/local_todo/manifest.json +++ b/homeassistant/components/local_todo/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/local_todo", "iot_class": "local_polling", - "requirements": ["ical==8.3.0"] + "requirements": ["ical==9.0.1"] } diff --git a/homeassistant/components/local_todo/strings.json b/homeassistant/components/local_todo/strings.json index 2403fae60a5..ebf7810494c 100644 --- a/homeassistant/components/local_todo/strings.json +++ b/homeassistant/components/local_todo/strings.json @@ -6,7 +6,8 @@ "description": "Please choose a name for your new To-do list", "data": { "todo_list_name": "To-do list name" - } + }, + "submit": "Create" } }, "abort": { diff --git a/homeassistant/components/logger/helpers.py b/homeassistant/components/logger/helpers.py index 034266428a3..00cea7e8aa5 100644 --- a/homeassistant/components/logger/helpers.py +++ b/homeassistant/components/logger/helpers.py @@ -203,7 +203,7 @@ class 
LoggerSettings: else: loggers = {domain} - combined_logs = {logger: LOGSEVERITY[settings.level] for logger in loggers} + combined_logs = dict.fromkeys(loggers, LOGSEVERITY[settings.level]) # Don't override the log levels with the ones from YAML # since we want whatever the user is asking for to be honored. diff --git a/homeassistant/components/lutron_caseta/__init__.py b/homeassistant/components/lutron_caseta/__init__.py index d697d6244b5..b489fe9dba7 100644 --- a/homeassistant/components/lutron_caseta/__init__.py +++ b/homeassistant/components/lutron_caseta/__init__.py @@ -3,7 +3,6 @@ from __future__ import annotations import asyncio -import contextlib from itertools import chain import logging import ssl @@ -37,11 +36,12 @@ from .const import ( ATTR_SERIAL, ATTR_TYPE, BRIDGE_DEVICE_ID, - BRIDGE_TIMEOUT, CONF_CA_CERTS, CONF_CERTFILE, CONF_KEYFILE, CONF_SUBTYPE, + CONFIGURE_TIMEOUT, + CONNECT_TIMEOUT, DOMAIN, LUTRON_CASETA_BUTTON_EVENT, MANUFACTURER, @@ -161,28 +161,40 @@ async def async_setup_entry( keyfile = hass.config.path(entry.data[CONF_KEYFILE]) certfile = hass.config.path(entry.data[CONF_CERTFILE]) ca_certs = hass.config.path(entry.data[CONF_CA_CERTS]) - bridge = None + connected_future: asyncio.Future[None] = hass.loop.create_future() + + def _on_connect() -> None: + nonlocal connected_future + if not connected_future.done(): + connected_future.set_result(None) try: bridge = Smartbridge.create_tls( - hostname=host, keyfile=keyfile, certfile=certfile, ca_certs=ca_certs + hostname=host, + keyfile=keyfile, + certfile=certfile, + ca_certs=ca_certs, + on_connect_callback=_on_connect, ) except ssl.SSLError: _LOGGER.error("Invalid certificate used to connect to bridge at %s", host) return False - timed_out = True - with contextlib.suppress(TimeoutError): - async with asyncio.timeout(BRIDGE_TIMEOUT): - await bridge.connect() - timed_out = False + connect_task = hass.async_create_task(bridge.connect()) + for future, name, timeout in ( + (connected_future, "connect", CONNECT_TIMEOUT), + (connect_task, "configure", CONFIGURE_TIMEOUT), + ): + try: + async with asyncio.timeout(timeout): + await future + except TimeoutError as ex: + connect_task.cancel() + await bridge.close() + raise ConfigEntryNotReady(f"Timed out on {name} for {host}") from ex - if timed_out or not bridge.is_connected(): - await bridge.close() - if timed_out: - raise ConfigEntryNotReady(f"Timed out while trying to connect to {host}") - if not bridge.is_connected(): - raise ConfigEntryNotReady(f"Cannot connect to {host}") + if not bridge.is_connected(): + raise ConfigEntryNotReady(f"Connection failed to {host}") _LOGGER.debug("Connected to Lutron Caseta bridge via LEAP at %s", host) await _async_migrate_unique_ids(hass, entry) diff --git a/homeassistant/components/lutron_caseta/config_flow.py b/homeassistant/components/lutron_caseta/config_flow.py index 767c3d2f2b7..45e7a04bdc9 100644 --- a/homeassistant/components/lutron_caseta/config_flow.py +++ b/homeassistant/components/lutron_caseta/config_flow.py @@ -20,10 +20,11 @@ from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo from .const import ( ABORT_REASON_CANNOT_CONNECT, BRIDGE_DEVICE_ID, - BRIDGE_TIMEOUT, CONF_CA_CERTS, CONF_CERTFILE, CONF_KEYFILE, + CONFIGURE_TIMEOUT, + CONNECT_TIMEOUT, DOMAIN, ERROR_CANNOT_CONNECT, STEP_IMPORT_FAILED, @@ -232,7 +233,7 @@ class LutronCasetaFlowHandler(ConfigFlow, domain=DOMAIN): return None try: - async with asyncio.timeout(BRIDGE_TIMEOUT): + async with asyncio.timeout(CONNECT_TIMEOUT + CONFIGURE_TIMEOUT): await 
bridge.connect() except TimeoutError: _LOGGER.error( diff --git a/homeassistant/components/lutron_caseta/const.py b/homeassistant/components/lutron_caseta/const.py index 809b9e8d007..26a83de6f4b 100644 --- a/homeassistant/components/lutron_caseta/const.py +++ b/homeassistant/components/lutron_caseta/const.py @@ -34,7 +34,8 @@ ACTION_RELEASE = "release" CONF_SUBTYPE = "subtype" -BRIDGE_TIMEOUT = 35 +CONNECT_TIMEOUT = 9 +CONFIGURE_TIMEOUT = 50 UNASSIGNED_AREA = "Unassigned" diff --git a/homeassistant/components/lutron_caseta/cover.py b/homeassistant/components/lutron_caseta/cover.py index 3727dbf17ba..e05fddb996f 100644 --- a/homeassistant/components/lutron_caseta/cover.py +++ b/homeassistant/components/lutron_caseta/cover.py @@ -108,6 +108,7 @@ PYLUTRON_TYPE_TO_CLASSES = { "QsWirelessHorizontalSheerBlind": LutronCasetaShade, "Shade": LutronCasetaShade, "PalladiomWireFreeShade": LutronCasetaShade, + "SerenaEssentialsRollerShade": LutronCasetaShade, } diff --git a/homeassistant/components/lutron_caseta/manifest.json b/homeassistant/components/lutron_caseta/manifest.json index bbb6df41a89..96b00a1f392 100644 --- a/homeassistant/components/lutron_caseta/manifest.json +++ b/homeassistant/components/lutron_caseta/manifest.json @@ -9,7 +9,7 @@ }, "iot_class": "local_push", "loggers": ["pylutron_caseta"], - "requirements": ["pylutron-caseta==0.23.0"], + "requirements": ["pylutron-caseta==0.24.0"], "zeroconf": [ { "type": "_lutron._tcp.local.", diff --git a/homeassistant/components/mastodon/__init__.py b/homeassistant/components/mastodon/__init__.py index ab8514c8321..17b8614a2e9 100644 --- a/homeassistant/components/mastodon/__init__.py +++ b/homeassistant/components/mastodon/__init__.py @@ -2,7 +2,7 @@ from __future__ import annotations -from mastodon.Mastodon import Mastodon, MastodonError +from mastodon.Mastodon import Account, Instance, InstanceV2, Mastodon, MastodonError from homeassistant.const import ( CONF_ACCESS_TOKEN, @@ -107,7 +107,9 @@ async def async_migrate_entry(hass: HomeAssistant, entry: MastodonConfigEntry) - return True -def setup_mastodon(entry: MastodonConfigEntry) -> tuple[Mastodon, dict, dict]: +def setup_mastodon( + entry: MastodonConfigEntry, +) -> tuple[Mastodon, InstanceV2 | Instance, Account]: """Get mastodon details.""" client = create_mastodon_client( entry.data[CONF_BASE_URL], diff --git a/homeassistant/components/mastodon/config_flow.py b/homeassistant/components/mastodon/config_flow.py index 1b93cbecd98..1ae1e6b229e 100644 --- a/homeassistant/components/mastodon/config_flow.py +++ b/homeassistant/components/mastodon/config_flow.py @@ -4,7 +4,12 @@ from __future__ import annotations from typing import Any -from mastodon.Mastodon import MastodonNetworkError, MastodonUnauthorizedError +from mastodon.Mastodon import ( + Account, + Instance, + MastodonNetworkError, + MastodonUnauthorizedError, +) import voluptuous as vol from yarl import URL @@ -56,8 +61,8 @@ class MastodonConfigFlow(ConfigFlow, domain=DOMAIN): client_secret: str, access_token: str, ) -> tuple[ - dict[str, str] | None, - dict[str, str] | None, + Instance | None, + Account | None, dict[str, str], ]: """Check connection to the Mastodon instance.""" diff --git a/homeassistant/components/mastodon/const.py b/homeassistant/components/mastodon/const.py index a4af49a27a6..2efda329467 100644 --- a/homeassistant/components/mastodon/const.py +++ b/homeassistant/components/mastodon/const.py @@ -12,14 +12,6 @@ DATA_HASS_CONFIG = "mastodon_hass_config" DEFAULT_URL: Final = "https://mastodon.social" DEFAULT_NAME: Final 
= "Mastodon" -INSTANCE_VERSION: Final = "version" -INSTANCE_URI: Final = "uri" -INSTANCE_DOMAIN: Final = "domain" -ACCOUNT_USERNAME: Final = "username" -ACCOUNT_FOLLOWERS_COUNT: Final = "followers_count" -ACCOUNT_FOLLOWING_COUNT: Final = "following_count" -ACCOUNT_STATUSES_COUNT: Final = "statuses_count" - ATTR_CONFIG_ENTRY_ID = "config_entry_id" ATTR_STATUS = "status" ATTR_VISIBILITY = "visibility" diff --git a/homeassistant/components/mastodon/coordinator.py b/homeassistant/components/mastodon/coordinator.py index 5d2b193b4a8..99785eca80b 100644 --- a/homeassistant/components/mastodon/coordinator.py +++ b/homeassistant/components/mastodon/coordinator.py @@ -4,10 +4,9 @@ from __future__ import annotations from dataclasses import dataclass from datetime import timedelta -from typing import Any from mastodon import Mastodon -from mastodon.Mastodon import MastodonError +from mastodon.Mastodon import Account, Instance, InstanceV2, MastodonError from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -21,15 +20,15 @@ class MastodonData: """Mastodon data type.""" client: Mastodon - instance: dict - account: dict + instance: InstanceV2 | Instance + account: Account coordinator: MastodonCoordinator type MastodonConfigEntry = ConfigEntry[MastodonData] -class MastodonCoordinator(DataUpdateCoordinator[dict[str, Any]]): +class MastodonCoordinator(DataUpdateCoordinator[Account]): """Class to manage fetching Mastodon data.""" config_entry: MastodonConfigEntry @@ -47,9 +46,9 @@ class MastodonCoordinator(DataUpdateCoordinator[dict[str, Any]]): ) self.client = client - async def _async_update_data(self) -> dict[str, Any]: + async def _async_update_data(self) -> Account: try: - account: dict = await self.hass.async_add_executor_job( + account: Account = await self.hass.async_add_executor_job( self.client.account_verify_credentials ) except MastodonError as ex: diff --git a/homeassistant/components/mastodon/diagnostics.py b/homeassistant/components/mastodon/diagnostics.py index dc7c1b785ab..31444413dfd 100644 --- a/homeassistant/components/mastodon/diagnostics.py +++ b/homeassistant/components/mastodon/diagnostics.py @@ -4,6 +4,8 @@ from __future__ import annotations from typing import Any +from mastodon.Mastodon import Account, Instance + from homeassistant.core import HomeAssistant from .coordinator import MastodonConfigEntry @@ -25,7 +27,7 @@ async def async_get_config_entry_diagnostics( } -def get_diagnostics(config_entry: MastodonConfigEntry) -> tuple[dict, dict]: +def get_diagnostics(config_entry: MastodonConfigEntry) -> tuple[Instance, Account]: """Get mastodon diagnostics.""" client = config_entry.runtime_data.client diff --git a/homeassistant/components/mastodon/entity.py b/homeassistant/components/mastodon/entity.py index 2ae8c0d852e..60224e75e41 100644 --- a/homeassistant/components/mastodon/entity.py +++ b/homeassistant/components/mastodon/entity.py @@ -4,7 +4,7 @@ from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DEFAULT_NAME, DOMAIN, INSTANCE_VERSION +from .const import DEFAULT_NAME, DOMAIN from .coordinator import MastodonConfigEntry, MastodonCoordinator from .utils import construct_mastodon_username @@ -40,7 +40,7 @@ class MastodonEntity(CoordinatorEntity[MastodonCoordinator]): manufacturer="Mastodon gGmbH", model=full_account_name, entry_type=DeviceEntryType.SERVICE, - 
sw_version=data.runtime_data.instance[INSTANCE_VERSION], + sw_version=data.runtime_data.instance.version, name=name, ) diff --git a/homeassistant/components/mastodon/notify.py b/homeassistant/components/mastodon/notify.py index 8af98ec3ab1..149ef1f6a48 100644 --- a/homeassistant/components/mastodon/notify.py +++ b/homeassistant/components/mastodon/notify.py @@ -5,7 +5,7 @@ from __future__ import annotations from typing import Any, cast from mastodon import Mastodon -from mastodon.Mastodon import MastodonAPIError +from mastodon.Mastodon import MastodonAPIError, MediaAttachment import voluptuous as vol from homeassistant.components.notify import ( @@ -114,7 +114,7 @@ class MastodonNotificationService(BaseNotificationService): message, visibility=target, spoiler_text=content_warning, - media_ids=mediadata["id"], + media_ids=mediadata.id, sensitive=sensitive, ) except MastodonAPIError as err: @@ -134,12 +134,14 @@ class MastodonNotificationService(BaseNotificationService): translation_key="unable_to_send_message", ) from err - def _upload_media(self, media_path: Any = None) -> Any: + def _upload_media(self, media_path: Any = None) -> MediaAttachment: """Upload media.""" with open(media_path, "rb"): media_type = get_media_type(media_path) try: - mediadata = self.client.media_post(media_path, mime_type=media_type) + mediadata: MediaAttachment = self.client.media_post( + media_path, mime_type=media_type + ) except MastodonAPIError as err: raise HomeAssistantError( translation_domain=DOMAIN, diff --git a/homeassistant/components/mastodon/quality_scale.yaml b/homeassistant/components/mastodon/quality_scale.yaml index 43636ed6924..f07f7e0a8ad 100644 --- a/homeassistant/components/mastodon/quality_scale.yaml +++ b/homeassistant/components/mastodon/quality_scale.yaml @@ -93,7 +93,4 @@ rules: # Platinum async-dependency: todo inject-websession: todo - strict-typing: - status: todo - comment: | - Requirement 'Mastodon.py==1.8.1' appears untyped + strict-typing: done diff --git a/homeassistant/components/mastodon/sensor.py b/homeassistant/components/mastodon/sensor.py index 74537e33cae..bfdc9c90333 100644 --- a/homeassistant/components/mastodon/sensor.py +++ b/homeassistant/components/mastodon/sensor.py @@ -4,7 +4,8 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass -from typing import Any + +from mastodon.Mastodon import Account from homeassistant.components.sensor import ( SensorEntity, @@ -15,11 +16,6 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.typing import StateType -from .const import ( - ACCOUNT_FOLLOWERS_COUNT, - ACCOUNT_FOLLOWING_COUNT, - ACCOUNT_STATUSES_COUNT, -) from .coordinator import MastodonConfigEntry from .entity import MastodonEntity @@ -31,7 +27,7 @@ PARALLEL_UPDATES = 0 class MastodonSensorEntityDescription(SensorEntityDescription): """Describes Mastodon sensor entity.""" - value_fn: Callable[[dict[str, Any]], StateType] + value_fn: Callable[[Account], StateType] ENTITY_DESCRIPTIONS = ( @@ -39,19 +35,19 @@ ENTITY_DESCRIPTIONS = ( key="followers", translation_key="followers", state_class=SensorStateClass.TOTAL, - value_fn=lambda data: data.get(ACCOUNT_FOLLOWERS_COUNT), + value_fn=lambda data: data.followers_count, ), MastodonSensorEntityDescription( key="following", translation_key="following", state_class=SensorStateClass.TOTAL, - value_fn=lambda data: data.get(ACCOUNT_FOLLOWING_COUNT), + value_fn=lambda data: 
data.following_count, ), MastodonSensorEntityDescription( key="posts", translation_key="posts", state_class=SensorStateClass.TOTAL, - value_fn=lambda data: data.get(ACCOUNT_STATUSES_COUNT), + value_fn=lambda data: data.statuses_count, ), ) diff --git a/homeassistant/components/mastodon/services.py b/homeassistant/components/mastodon/services.py index 7ab351f8c29..68e95e726a1 100644 --- a/homeassistant/components/mastodon/services.py +++ b/homeassistant/components/mastodon/services.py @@ -5,7 +5,7 @@ from functools import partial from typing import Any, cast from mastodon import Mastodon -from mastodon.Mastodon import MastodonAPIError +from mastodon.Mastodon import MastodonAPIError, MediaAttachment import voluptuous as vol from homeassistant.config_entries import ConfigEntryState @@ -104,7 +104,7 @@ def setup_services(hass: HomeAssistant) -> None: def _post(client: Mastodon, **kwargs: Any) -> None: """Post to Mastodon.""" - media_data: dict[str, Any] | None = None + media_data: MediaAttachment | None = None media_path = kwargs.get("media_path") if media_path: @@ -137,7 +137,7 @@ def setup_services(hass: HomeAssistant) -> None: try: media_ids: str | None = None if media_data: - media_ids = media_data["id"] + media_ids = media_data.id client.status_post(media_ids=media_ids, **kwargs) except MastodonAPIError as err: raise HomeAssistantError( diff --git a/homeassistant/components/mastodon/utils.py b/homeassistant/components/mastodon/utils.py index e9c2567b675..898578c931b 100644 --- a/homeassistant/components/mastodon/utils.py +++ b/homeassistant/components/mastodon/utils.py @@ -6,8 +6,9 @@ import mimetypes from typing import Any from mastodon import Mastodon +from mastodon.Mastodon import Account, Instance, InstanceV2 -from .const import ACCOUNT_USERNAME, DEFAULT_NAME, INSTANCE_DOMAIN, INSTANCE_URI +from .const import DEFAULT_NAME def create_mastodon_client( @@ -23,14 +24,13 @@ def create_mastodon_client( def construct_mastodon_username( - instance: dict[str, str] | None, account: dict[str, str] | None + instance: InstanceV2 | Instance | None, account: Account | None ) -> str: """Construct a mastodon username from the account and instance.""" if instance and account: - return ( - f"@{account[ACCOUNT_USERNAME]}@" - f"{instance.get(INSTANCE_URI, instance.get(INSTANCE_DOMAIN))}" - ) + if type(instance) is InstanceV2: + return f"@{account.username}@{instance.domain}" + return f"@{account.username}@{instance.uri}" return DEFAULT_NAME diff --git a/homeassistant/components/matter/number.py b/homeassistant/components/matter/number.py index 44538f46856..2c7a9651c60 100644 --- a/homeassistant/components/matter/number.py +++ b/homeassistant/components/matter/number.py @@ -169,8 +169,8 @@ DISCOVERY_SCHEMAS = [ device_class=NumberDeviceClass.TEMPERATURE, entity_category=EntityCategory.CONFIG, translation_key="temperature_offset", - native_max_value=25, - native_min_value=-25, + native_max_value=50, + native_min_value=-50, native_step=0.5, native_unit_of_measurement=UnitOfTemperature.CELSIUS, measurement_to_ha=lambda x: None if x is None else x / 10, diff --git a/homeassistant/components/mcp/__init__.py b/homeassistant/components/mcp/__init__.py index 4a2b4da990d..41b6a260d9f 100644 --- a/homeassistant/components/mcp/__init__.py +++ b/homeassistant/components/mcp/__init__.py @@ -39,7 +39,6 @@ async def async_setup_entry( entry.async_on_unload(unsub) entry.runtime_data = coordinator - entry.async_on_unload(coordinator.close) return True diff --git a/homeassistant/components/mcp/coordinator.py 
b/homeassistant/components/mcp/coordinator.py index a5c5ee55dbf..6e66036c548 100644 --- a/homeassistant/components/mcp/coordinator.py +++ b/homeassistant/components/mcp/coordinator.py @@ -40,6 +40,7 @@ async def mcp_client(url: str) -> AsyncGenerator[ClientSession]: await session.initialize() yield session except ExceptionGroup as err: + _LOGGER.debug("Error creating MCP client: %s", err) raise err.exceptions[0] from err @@ -51,13 +52,13 @@ class ModelContextProtocolTool(llm.Tool): name: str, description: str | None, parameters: vol.Schema, - session: ClientSession, + server_url: str, ) -> None: """Initialize the tool.""" self.name = name self.description = description self.parameters = parameters - self.session = session + self.server_url = server_url async def async_call( self, @@ -67,10 +68,16 @@ class ModelContextProtocolTool(llm.Tool): ) -> JsonObjectType: """Call the tool.""" try: - result = await self.session.call_tool( - tool_input.tool_name, tool_input.tool_args - ) + async with asyncio.timeout(TIMEOUT): + async with mcp_client(self.server_url) as session: + result = await session.call_tool( + tool_input.tool_name, tool_input.tool_args + ) + except TimeoutError as error: + _LOGGER.debug("Timeout when calling tool: %s", error) + raise HomeAssistantError(f"Timeout when calling tool: {error}") from error except httpx.HTTPStatusError as error: + _LOGGER.debug("Error when calling tool: %s", error) raise HomeAssistantError(f"Error when calling tool: {error}") from error return result.model_dump(exclude_unset=True, exclude_none=True) @@ -79,8 +86,6 @@ class ModelContextProtocolCoordinator(DataUpdateCoordinator[list[llm.Tool]]): """Define an object to hold MCP data.""" config_entry: ConfigEntry - _session: ClientSession | None = None - _setup_error: Exception | None = None def __init__(self, hass: HomeAssistant, config_entry: ConfigEntry) -> None: """Initialize ModelContextProtocolCoordinator.""" @@ -91,52 +96,6 @@ class ModelContextProtocolCoordinator(DataUpdateCoordinator[list[llm.Tool]]): config_entry=config_entry, update_interval=UPDATE_INTERVAL, ) - self._stop = asyncio.Event() - - async def _async_setup(self) -> None: - """Set up the client connection.""" - connected = asyncio.Event() - stop = asyncio.Event() - self.config_entry.async_create_background_task( - self.hass, self._connect(connected, stop), "mcp-client" - ) - try: - async with asyncio.timeout(TIMEOUT): - await connected.wait() - self._stop = stop - finally: - if self._setup_error is not None: - raise self._setup_error - - async def _connect(self, connected: asyncio.Event, stop: asyncio.Event) -> None: - """Create a server-sent event MCP client.""" - url = self.config_entry.data[CONF_URL] - try: - async with ( - sse_client(url=url) as streams, - ClientSession(*streams) as session, - ): - await session.initialize() - self._session = session - connected.set() - await stop.wait() - except httpx.HTTPStatusError as err: - self._setup_error = err - _LOGGER.debug("Error connecting to MCP server: %s", err) - raise UpdateFailed(f"Error connecting to MCP server: {err}") from err - except ExceptionGroup as err: - self._setup_error = err.exceptions[0] - _LOGGER.debug("Error connecting to MCP server: %s", err) - raise UpdateFailed( - "Error connecting to MCP server: {err.exceptions[0]}" - ) from err.exceptions[0] - finally: - self._session = None - - async def close(self) -> None: - """Close the client connection.""" - if self._stop is not None: - self._stop.set() async def _async_update_data(self) -> list[llm.Tool]: """Fetch data 
from API endpoint. @@ -144,11 +103,15 @@ class ModelContextProtocolCoordinator(DataUpdateCoordinator[list[llm.Tool]]): This is the place to pre-process the data to lookup tables so entities can quickly look up their data. """ - if self._session is None: - raise UpdateFailed("No session available") try: - result = await self._session.list_tools() + async with asyncio.timeout(TIMEOUT): + async with mcp_client(self.config_entry.data[CONF_URL]) as session: + result = await session.list_tools() + except TimeoutError as error: + _LOGGER.debug("Timeout when listing tools: %s", error) + raise UpdateFailed(f"Timeout when listing tools: {error}") from error except httpx.HTTPError as err: + _LOGGER.debug("Error communicating with API: %s", err) raise UpdateFailed(f"Error communicating with API: {err}") from err _LOGGER.debug("Received tools: %s", result.tools) @@ -165,7 +128,7 @@ class ModelContextProtocolCoordinator(DataUpdateCoordinator[list[llm.Tool]]): tool.name, tool.description, parameters, - self._session, + self.config_entry.data[CONF_URL], ) ) return tools diff --git a/homeassistant/components/mcp/manifest.json b/homeassistant/components/mcp/manifest.json index ee4baf04802..9cd1e2899a6 100644 --- a/homeassistant/components/mcp/manifest.json +++ b/homeassistant/components/mcp/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/mcp", "iot_class": "local_polling", "quality_scale": "silver", - "requirements": ["mcp==1.1.2"] + "requirements": ["mcp==1.5.0"] } diff --git a/homeassistant/components/mcp_server/__init__.py b/homeassistant/components/mcp_server/__init__.py index 941eccbe528..e523f46228f 100644 --- a/homeassistant/components/mcp_server/__init__.py +++ b/homeassistant/components/mcp_server/__init__.py @@ -6,7 +6,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv from homeassistant.helpers.typing import ConfigType -from . import http, llm_api +from . import http from .const import DOMAIN from .session import SessionManager from .types import MCPServerConfigEntry @@ -25,7 +25,6 @@ CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Model Context Protocol component.""" http.async_register(hass) - llm_api.async_register_api(hass) return True diff --git a/homeassistant/components/mcp_server/config_flow.py b/homeassistant/components/mcp_server/config_flow.py index 8d8d311b874..e8df68de5e2 100644 --- a/homeassistant/components/mcp_server/config_flow.py +++ b/homeassistant/components/mcp_server/config_flow.py @@ -16,7 +16,7 @@ from homeassistant.helpers.selector import ( SelectSelectorConfig, ) -from .const import DOMAIN, LLM_API, LLM_API_NAME +from .const import DOMAIN _LOGGER = logging.getLogger(__name__) @@ -33,13 +33,6 @@ class ModelContextServerProtocolConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle the initial step.""" llm_apis = {api.id: api.name for api in llm.async_get_apis(self.hass)} - if LLM_API not in llm_apis: - # MCP server component is not loaded yet, so make the LLM API a choice. 
- llm_apis = { - LLM_API: LLM_API_NAME, - **llm_apis, - } - if user_input is not None: return self.async_create_entry( title=llm_apis[user_input[CONF_LLM_HASS_API]], data=user_input diff --git a/homeassistant/components/mcp_server/const.py b/homeassistant/components/mcp_server/const.py index 8958ac36616..3f2e12cbb6a 100644 --- a/homeassistant/components/mcp_server/const.py +++ b/homeassistant/components/mcp_server/const.py @@ -2,5 +2,6 @@ DOMAIN = "mcp_server" TITLE = "Model Context Protocol Server" -LLM_API = "stateless_assist" -LLM_API_NAME = "Stateless Assist" +# The Stateless API is no longer registered explicitly, but this name may still exist in the +# user's config entry. +STATELESS_LLM_API = "stateless_assist" diff --git a/homeassistant/components/mcp_server/llm_api.py b/homeassistant/components/mcp_server/llm_api.py deleted file mode 100644 index 5c29b29153e..00000000000 --- a/homeassistant/components/mcp_server/llm_api.py +++ /dev/null @@ -1,48 +0,0 @@ -"""LLM API for MCP Server.""" - -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers import llm -from homeassistant.util import yaml as yaml_util - -from .const import LLM_API, LLM_API_NAME - -EXPOSED_ENTITY_FIELDS = {"name", "domain", "description", "areas", "names"} - - -def async_register_api(hass: HomeAssistant) -> None: - """Register the LLM API.""" - llm.async_register_api(hass, StatelessAssistAPI(hass)) - - -class StatelessAssistAPI(llm.AssistAPI): - """LLM API for MCP Server that provides the Assist API without state information in the prompt. - - Syncing the state information is possible, but may put unnecessary load on - the system so we are instead providing the prompt without entity state. Since - actions don't care about the current state, there is little quality loss.
- """ - - def __init__(self, hass: HomeAssistant) -> None: - """Initialize the StatelessAssistAPI.""" - super().__init__(hass) - self.id = LLM_API - self.name = LLM_API_NAME - - @callback - def _async_get_exposed_entities_prompt( - self, llm_context: llm.LLMContext, exposed_entities: dict | None - ) -> list[str]: - """Return the prompt for the exposed entities.""" - prompt = [] - - if exposed_entities and exposed_entities["entities"]: - prompt.append( - "An overview of the areas and the devices in this smart home:" - ) - entities = [ - {k: v for k, v in entity_info.items() if k in EXPOSED_ENTITY_FIELDS} - for entity_info in exposed_entities["entities"].values() - ] - prompt.append(yaml_util.dump(list(entities))) - - return prompt diff --git a/homeassistant/components/mcp_server/manifest.json b/homeassistant/components/mcp_server/manifest.json index 18b2e5bc417..b5fb1bdcd87 100644 --- a/homeassistant/components/mcp_server/manifest.json +++ b/homeassistant/components/mcp_server/manifest.json @@ -8,6 +8,6 @@ "integration_type": "service", "iot_class": "local_push", "quality_scale": "silver", - "requirements": ["mcp==1.1.2", "aiohttp_sse==2.2.0", "anyio==4.8.0"], + "requirements": ["mcp==1.5.0", "aiohttp_sse==2.2.0", "anyio==4.9.0"], "single_config_entry": true } diff --git a/homeassistant/components/mcp_server/server.py b/homeassistant/components/mcp_server/server.py index ba21abd722c..affa4faecd6 100644 --- a/homeassistant/components/mcp_server/server.py +++ b/homeassistant/components/mcp_server/server.py @@ -21,6 +21,8 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import llm +from .const import STATELESS_LLM_API + _LOGGER = logging.getLogger(__name__) @@ -47,16 +49,23 @@ async def create_server( A Model Context Protocol Server object is associated with a single session. The MCP SDK handles the details of the protocol. 
""" + if llm_api_id == STATELESS_LLM_API: + llm_api_id = llm.LLM_API_ASSIST - server = Server("home-assistant") + server = Server[Any]("home-assistant") + + async def get_api_instance() -> llm.APIInstance: + """Get the LLM API selected.""" + # Backwards compatibility with old MCP Server config + return await llm.async_get_api(hass, llm_api_id, llm_context) @server.list_prompts() # type: ignore[no-untyped-call, misc] async def handle_list_prompts() -> list[types.Prompt]: - llm_api = await llm.async_get_api(hass, llm_api_id, llm_context) + llm_api = await get_api_instance() return [ types.Prompt( name=llm_api.api.name, - description=f"Default prompt for the Home Assistant LLM API {llm_api.api.name}", + description=f"Default prompt for Home Assistant {llm_api.api.name} API", ) ] @@ -64,12 +73,12 @@ async def create_server( async def handle_get_prompt( name: str, arguments: dict[str, str] | None ) -> types.GetPromptResult: - llm_api = await llm.async_get_api(hass, llm_api_id, llm_context) + llm_api = await get_api_instance() if name != llm_api.api.name: raise ValueError(f"Unknown prompt: {name}") return types.GetPromptResult( - description=f"Default prompt for the Home Assistant LLM API {llm_api.api.name}", + description=f"Default prompt for Home Assistant {llm_api.api.name} API", messages=[ types.PromptMessage( role="assistant", @@ -84,13 +93,13 @@ async def create_server( @server.list_tools() # type: ignore[no-untyped-call, misc] async def list_tools() -> list[types.Tool]: """List available time tools.""" - llm_api = await llm.async_get_api(hass, llm_api_id, llm_context) + llm_api = await get_api_instance() return [_format_tool(tool, llm_api.custom_serializer) for tool in llm_api.tools] @server.call_tool() # type: ignore[no-untyped-call, misc] async def call_tool(name: str, arguments: dict) -> Sequence[types.TextContent]: """Handle calling tools.""" - llm_api = await llm.async_get_api(hass, llm_api_id, llm_context) + llm_api = await get_api_instance() tool_input = llm.ToolInput(tool_name=name, tool_args=arguments) _LOGGER.debug("Tool call: %s(%s)", tool_input.tool_name, tool_input.tool_args) diff --git a/homeassistant/components/mealie/strings.json b/homeassistant/components/mealie/strings.json index fa63252e837..186fc4c4ac0 100644 --- a/homeassistant/components/mealie/strings.json +++ b/homeassistant/components/mealie/strings.json @@ -146,11 +146,11 @@ "services": { "get_mealplan": { "name": "Get mealplan", - "description": "Get mealplan from Mealie", + "description": "Gets a mealplan from Mealie", "fields": { "config_entry_id": { "name": "Mealie instance", - "description": "Select the Mealie instance to get mealplan from" + "description": "The Mealie instance to use for this action." 
}, "start_date": { "name": "Start date", @@ -164,7 +164,7 @@ }, "get_recipe": { "name": "Get recipe", - "description": "Get recipe from Mealie", + "description": "Gets a recipe from Mealie", "fields": { "config_entry_id": { "name": "[%key:component::mealie::services::get_mealplan::fields::config_entry_id::name%]", @@ -178,7 +178,7 @@ }, "import_recipe": { "name": "Import recipe", - "description": "Import recipe from an URL", + "description": "Imports a recipe from an URL", "fields": { "config_entry_id": { "name": "[%key:component::mealie::services::get_mealplan::fields::config_entry_id::name%]", @@ -196,7 +196,7 @@ }, "set_random_mealplan": { "name": "Set random mealplan", - "description": "Set a random mealplan for a specific date", + "description": "Sets a random mealplan for a specific date", "fields": { "config_entry_id": { "name": "[%key:component::mealie::services::get_mealplan::fields::config_entry_id::name%]", @@ -214,7 +214,7 @@ }, "set_mealplan": { "name": "Set a mealplan", - "description": "Set a mealplan for a specific date", + "description": "Sets a mealplan for a specific date", "fields": { "config_entry_id": { "name": "[%key:component::mealie::services::get_mealplan::fields::config_entry_id::name%]", diff --git a/homeassistant/components/meater/config_flow.py b/homeassistant/components/meater/config_flow.py index a7ba3ba1498..5c11b10755c 100644 --- a/homeassistant/components/meater/config_flow.py +++ b/homeassistant/components/meater/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Mapping +import logging from typing import Any from meater import AuthenticationError, MeaterApi, ServiceUnavailableError @@ -14,6 +15,8 @@ from homeassistant.helpers import aiohttp_client from .const import DOMAIN +_LOGGER = logging.getLogger(__name__) + REAUTH_SCHEMA = vol.Schema({vol.Required(CONF_PASSWORD): str}) USER_SCHEMA = vol.Schema( {vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str} @@ -84,7 +87,8 @@ class MeaterConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "invalid_auth" except ServiceUnavailableError: errors["base"] = "service_unavailable_error" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown_auth_error" else: data = {"username": username, "password": password} diff --git a/homeassistant/components/media_extractor/manifest.json b/homeassistant/components/media_extractor/manifest.json index 575c0fa878d..e049a827c75 100644 --- a/homeassistant/components/media_extractor/manifest.json +++ b/homeassistant/components/media_extractor/manifest.json @@ -8,6 +8,6 @@ "iot_class": "calculated", "loggers": ["yt_dlp"], "quality_scale": "internal", - "requirements": ["yt-dlp[default]==2025.02.19"], + "requirements": ["yt-dlp[default]==2025.03.26"], "single_config_entry": true } diff --git a/homeassistant/components/media_player/__init__.py b/homeassistant/components/media_player/__init__.py index a30b01694fa..45d08bea7ce 100644 --- a/homeassistant/components/media_player/__init__.py +++ b/homeassistant/components/media_player/__init__.py @@ -1031,7 +1031,6 @@ class MediaPlayerEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): if self.state in { MediaPlayerState.OFF, - MediaPlayerState.IDLE, MediaPlayerState.STANDBY, }: await self.async_turn_on() diff --git a/homeassistant/components/melcloud/climate.py b/homeassistant/components/melcloud/climate.py index 9c2ee60b12c..682a28ea080 100644 --- a/homeassistant/components/melcloud/climate.py +++ 
b/homeassistant/components/melcloud/climate.py @@ -3,7 +3,7 @@ from __future__ import annotations from datetime import timedelta -from typing import Any +from typing import Any, cast from pymelcloud import DEVICE_TYPE_ATA, DEVICE_TYPE_ATW, AtaDevice, AtwDevice import pymelcloud.ata_device as ata @@ -236,7 +236,7 @@ class AtaDeviceClimate(MelCloudClimate): set_dict: dict[str, Any] = {} if ATTR_HVAC_MODE in kwargs: self._apply_set_hvac_mode( - kwargs.get(ATTR_HVAC_MODE, self.hvac_mode), set_dict + cast(HVACMode, kwargs.get(ATTR_HVAC_MODE, self.hvac_mode)), set_dict ) if ATTR_TEMPERATURE in kwargs: diff --git a/homeassistant/components/meteo_france/__init__.py b/homeassistant/components/meteo_france/__init__.py index 5c4ada6b5f1..5f1d5269538 100644 --- a/homeassistant/components/meteo_france/__init__.py +++ b/homeassistant/components/meteo_france/__init__.py @@ -57,7 +57,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Fetch data from API endpoint.""" assert isinstance(department, str) return await hass.async_add_executor_job( - client.get_warning_current_phenomenoms, department, 0, True + client.get_warning_current_phenomenons, department, 0, True ) coordinator_forecast = DataUpdateCoordinator( diff --git a/homeassistant/components/meteo_france/manifest.json b/homeassistant/components/meteo_france/manifest.json index 567788ec479..d82d0c3f91b 100644 --- a/homeassistant/components/meteo_france/manifest.json +++ b/homeassistant/components/meteo_france/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/meteo_france", "iot_class": "cloud_polling", "loggers": ["meteofrance_api"], - "requirements": ["meteofrance-api==1.3.0"] + "requirements": ["meteofrance-api==1.4.0"] } diff --git a/homeassistant/components/meteo_france/sensor.py b/homeassistant/components/meteo_france/sensor.py index c29cc1ceda9..7333f7b0c19 100644 --- a/homeassistant/components/meteo_france/sensor.py +++ b/homeassistant/components/meteo_france/sensor.py @@ -7,7 +7,7 @@ from typing import Any from meteofrance_api.helpers import ( get_warning_text_status_from_indice_color, - readeable_phenomenoms_dict, + readable_phenomenons_dict, ) from meteofrance_api.model.forecast import Forecast from meteofrance_api.model.rain import Rain @@ -336,7 +336,7 @@ class MeteoFranceAlertSensor(MeteoFranceSensor[CurrentPhenomenons]): def extra_state_attributes(self): """Return the state attributes.""" return { - **readeable_phenomenoms_dict(self.coordinator.data.phenomenons_max_colors), + **readable_phenomenons_dict(self.coordinator.data.phenomenons_max_colors), } diff --git a/homeassistant/components/meteoclimatic/sensor.py b/homeassistant/components/meteoclimatic/sensor.py index 169da7a0a18..6e508bd63d8 100644 --- a/homeassistant/components/meteoclimatic/sensor.py +++ b/homeassistant/components/meteoclimatic/sensor.py @@ -102,6 +102,7 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] 
= ( native_unit_of_measurement=DEGREE, icon="mdi:weather-windy", device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, ), SensorEntityDescription( key="rain", diff --git a/homeassistant/components/modbus/__init__.py b/homeassistant/components/modbus/__init__.py index 61df7206402..52642cc32e3 100644 --- a/homeassistant/components/modbus/__init__.py +++ b/homeassistant/components/modbus/__init__.py @@ -79,6 +79,16 @@ from .const import ( CONF_FAN_MODE_TOP, CONF_FAN_MODE_VALUES, CONF_FANS, + CONF_HVAC_ACTION_COOLING, + CONF_HVAC_ACTION_DEFROSTING, + CONF_HVAC_ACTION_DRYING, + CONF_HVAC_ACTION_FAN, + CONF_HVAC_ACTION_HEATING, + CONF_HVAC_ACTION_IDLE, + CONF_HVAC_ACTION_OFF, + CONF_HVAC_ACTION_PREHEATING, + CONF_HVAC_ACTION_REGISTER, + CONF_HVAC_ACTION_VALUES, CONF_HVAC_MODE_AUTO, CONF_HVAC_MODE_COOL, CONF_HVAC_MODE_DRY, @@ -297,6 +307,45 @@ CLIMATE_SCHEMA = vol.All( vol.Optional(CONF_WRITE_REGISTERS, default=False): cv.boolean, } ), + vol.Optional(CONF_HVAC_ACTION_REGISTER): vol.Maybe( + { + CONF_ADDRESS: cv.positive_int, + CONF_HVAC_ACTION_VALUES: { + vol.Optional(CONF_HVAC_ACTION_COOLING): vol.Any( + cv.positive_int, [cv.positive_int] + ), + vol.Optional(CONF_HVAC_ACTION_DEFROSTING): vol.Any( + cv.positive_int, [cv.positive_int] + ), + vol.Optional(CONF_HVAC_ACTION_DRYING): vol.Any( + cv.positive_int, [cv.positive_int] + ), + vol.Optional(CONF_HVAC_ACTION_FAN): vol.Any( + cv.positive_int, [cv.positive_int] + ), + vol.Optional(CONF_HVAC_ACTION_HEATING): vol.Any( + cv.positive_int, [cv.positive_int] + ), + vol.Optional(CONF_HVAC_ACTION_IDLE): vol.Any( + cv.positive_int, [cv.positive_int] + ), + vol.Optional(CONF_HVAC_ACTION_OFF): vol.Any( + cv.positive_int, [cv.positive_int] + ), + vol.Optional(CONF_HVAC_ACTION_PREHEATING): vol.Any( + cv.positive_int, [cv.positive_int] + ), + }, + vol.Optional( + CONF_INPUT_TYPE, default=CALL_TYPE_REGISTER_HOLDING + ): vol.In( + [ + CALL_TYPE_REGISTER_HOLDING, + CALL_TYPE_REGISTER_INPUT, + ] + ), + } + ), vol.Optional(CONF_FAN_MODE_REGISTER): vol.Maybe( vol.All( { diff --git a/homeassistant/components/modbus/climate.py b/homeassistant/components/modbus/climate.py index fca1b94611a..be10a9495c6 100644 --- a/homeassistant/components/modbus/climate.py +++ b/homeassistant/components/modbus/climate.py @@ -24,6 +24,7 @@ from homeassistant.components.climate import ( SWING_VERTICAL, ClimateEntity, ClimateEntityFeature, + HVACAction, HVACMode, ) from homeassistant.const import ( @@ -61,6 +62,16 @@ from .const import ( CONF_FAN_MODE_REGISTER, CONF_FAN_MODE_TOP, CONF_FAN_MODE_VALUES, + CONF_HVAC_ACTION_COOLING, + CONF_HVAC_ACTION_DEFROSTING, + CONF_HVAC_ACTION_DRYING, + CONF_HVAC_ACTION_FAN, + CONF_HVAC_ACTION_HEATING, + CONF_HVAC_ACTION_IDLE, + CONF_HVAC_ACTION_OFF, + CONF_HVAC_ACTION_PREHEATING, + CONF_HVAC_ACTION_REGISTER, + CONF_HVAC_ACTION_VALUES, CONF_HVAC_MODE_AUTO, CONF_HVAC_MODE_COOL, CONF_HVAC_MODE_DRY, @@ -74,6 +85,7 @@ from .const import ( CONF_HVAC_ON_VALUE, CONF_HVAC_ONOFF_COIL, CONF_HVAC_ONOFF_REGISTER, + CONF_INPUT_TYPE, CONF_MAX_TEMP, CONF_MIN_TEMP, CONF_STEP, @@ -188,6 +200,34 @@ class ModbusThermostat(BaseStructPlatform, RestoreEntity, ClimateEntity): self._attr_hvac_mode = HVACMode.AUTO self._attr_hvac_modes = [HVACMode.AUTO] + if CONF_HVAC_ACTION_REGISTER in config: + action_config = config[CONF_HVAC_ACTION_REGISTER] + self._hvac_action_register = action_config[CONF_ADDRESS] + self._hvac_action_type = action_config[CONF_INPUT_TYPE] + + self._attr_hvac_action = None + self._hvac_action_mapping: 
list[tuple[int, HVACAction]] = [] + action_value_config = action_config[CONF_HVAC_ACTION_VALUES] + + for hvac_action_kw, hvac_action in ( + (CONF_HVAC_ACTION_COOLING, HVACAction.COOLING), + (CONF_HVAC_ACTION_DEFROSTING, HVACAction.DEFROSTING), + (CONF_HVAC_ACTION_DRYING, HVACAction.DRYING), + (CONF_HVAC_ACTION_FAN, HVACAction.FAN), + (CONF_HVAC_ACTION_HEATING, HVACAction.HEATING), + (CONF_HVAC_ACTION_IDLE, HVACAction.IDLE), + (CONF_HVAC_ACTION_OFF, HVACAction.OFF), + (CONF_HVAC_ACTION_PREHEATING, HVACAction.PREHEATING), + ): + if hvac_action_kw in action_value_config: + values = action_value_config[hvac_action_kw] + if not isinstance(values, list): + values = [values] + for value in values: + self._hvac_action_mapping.append((value, hvac_action)) + else: + self._hvac_action_register = None + if CONF_FAN_MODE_REGISTER in config: self._attr_supported_features = ( self._attr_supported_features | ClimateEntityFeature.FAN_MODE @@ -216,7 +256,6 @@ class ModbusThermostat(BaseStructPlatform, RestoreEntity, ClimateEntity): self._fan_mode_mapping_from_modbus[value] = fan_mode self._fan_mode_mapping_to_modbus[fan_mode] = value self._attr_fan_modes.append(fan_mode) - else: # No FAN modes defined self._fan_mode_register = None @@ -457,6 +496,20 @@ class ModbusThermostat(BaseStructPlatform, RestoreEntity, ClimateEntity): self._attr_hvac_mode = mode break + # Read the HVAC action register if defined + if self._hvac_action_register is not None: + hvac_action = await self._async_read_register( + self._hvac_action_type, self._hvac_action_register, raw=True + ) + + # Translate the value received + if hvac_action is not None: + self._attr_hvac_action = None + for value, action in self._hvac_action_mapping: + if hvac_action == value: + self._attr_hvac_action = action + break + # Read the Fan mode register if defined if self._fan_mode_register is not None: fan_mode = await self._async_read_register( diff --git a/homeassistant/components/modbus/const.py b/homeassistant/components/modbus/const.py index 5926569040d..634637a6b08 100644 --- a/homeassistant/components/modbus/const.py +++ b/homeassistant/components/modbus/const.py @@ -63,6 +63,16 @@ CONF_HVAC_ONOFF_REGISTER = "hvac_onoff_register" CONF_HVAC_ON_VALUE = "hvac_on_value" CONF_HVAC_OFF_VALUE = "hvac_off_value" CONF_HVAC_ONOFF_COIL = "hvac_onoff_coil" +CONF_HVAC_ACTION_REGISTER = "hvac_action_register" +CONF_HVAC_ACTION_COOLING = "action_cooling" +CONF_HVAC_ACTION_DEFROSTING = "action_defrosting" +CONF_HVAC_ACTION_DRYING = "action_drying" +CONF_HVAC_ACTION_FAN = "action_fan" +CONF_HVAC_ACTION_HEATING = "action_heating" +CONF_HVAC_ACTION_IDLE = "action_idle" +CONF_HVAC_ACTION_OFF = "action_off" +CONF_HVAC_ACTION_PREHEATING = "action_preheating" +CONF_HVAC_ACTION_VALUES = "values" CONF_HVAC_MODE_OFF = "state_off" CONF_HVAC_MODE_HEAT = "state_heat" CONF_HVAC_MODE_COOL = "state_cool" diff --git a/homeassistant/components/moehlenhoff_alpha2/manifest.json b/homeassistant/components/moehlenhoff_alpha2/manifest.json index 14f40991a84..45b7f8c9565 100644 --- a/homeassistant/components/moehlenhoff_alpha2/manifest.json +++ b/homeassistant/components/moehlenhoff_alpha2/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/moehlenhoff_alpha2", "iot_class": "local_push", - "requirements": ["moehlenhoff-alpha2==1.3.1"] + "requirements": ["moehlenhoff-alpha2==1.4.0"] } diff --git a/homeassistant/components/motion_blinds/config_flow.py b/homeassistant/components/motion_blinds/config_flow.py index 
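The hvac_action_register support added to the Modbus climate platform above accepts either a single value or a list of values per action and flattens them into (value, HVACAction) pairs that are matched when the register is read. A condensed sketch of that mapping logic, using a hypothetical set of configured values (the dict below stands in for the parsed YAML, not for real config):

from homeassistant.components.climate import HVACAction

# Hypothetical values as they could appear under hvac_action_register -> values
action_values = {"action_heating": [1, 2], "action_idle": 0, "action_off": 255}

mapping: list[tuple[int, HVACAction]] = []
for key, action in (
    ("action_heating", HVACAction.HEATING),
    ("action_idle", HVACAction.IDLE),
    ("action_off", HVACAction.OFF),
):
    if key in action_values:
        values = action_values[key]
        if not isinstance(values, list):
            values = [values]  # normalize a single int to a one-element list
        mapping.extend((value, action) for value in values)

def resolve_action(register_value: int) -> HVACAction | None:
    """Return the HVAC action matching a raw register value, or None if unmapped."""
    return next((action for value, action in mapping if value == register_value), None)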
a7bb34af1e6..954f9e25c21 100644 --- a/homeassistant/components/motion_blinds/config_flow.py +++ b/homeassistant/components/motion_blinds/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +import logging from typing import Any from motionblinds import MotionDiscovery, MotionGateway @@ -28,6 +29,8 @@ from .const import ( ) from .gateway import ConnectMotionGateway +_LOGGER = logging.getLogger(__name__) + CONFIG_SCHEMA = vol.Schema( { vol.Optional(CONF_HOST): str, @@ -93,7 +96,8 @@ class MotionBlindsFlowHandler(ConfigFlow, domain=DOMAIN): try: # key not needed for GetDeviceList request await self.hass.async_add_executor_job(gateway.GetDeviceList) - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Failed to connect to Motion Gateway") return self.async_abort(reason="not_motionblinds") if not gateway.available: diff --git a/homeassistant/components/motion_blinds/strings.json b/homeassistant/components/motion_blinds/strings.json index ddbf928462a..12060cd69f0 100644 --- a/homeassistant/components/motion_blinds/strings.json +++ b/homeassistant/components/motion_blinds/strings.json @@ -3,20 +3,20 @@ "flow_title": "{short_mac} ({ip_address})", "step": { "user": { - "description": "Connect to your Motion Gateway, if the IP address is not set, auto-discovery is used", + "description": "Connect to your Motionblinds gateway. If the IP address is not set, auto-discovery is used", "data": { "host": "[%key:common::config_flow::data::ip%]" } }, "connect": { - "description": "You will need the 16 character API Key, see https://www.home-assistant.io/integrations/motion_blinds/#retrieving-the-key for instructions", + "description": "You will need the 16 character API key, see https://www.home-assistant.io/integrations/motion_blinds/#retrieving-the-api-key for instructions", "data": { "api_key": "[%key:common::config_flow::data::api_key%]" } }, "select": { - "title": "Select the Motion Gateway that you wish to connect", - "description": "Run the setup again if you want to connect additional Motion Gateways", + "title": "Select the Motionblinds gateway that you wish to connect", + "description": "Run the setup again if you want to connect additional Motionblinds gateways", "data": { "select_ip": "[%key:common::config_flow::data::ip%]" } @@ -29,7 +29,7 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", "connection_error": "[%key:common::config_flow::error::cannot_connect%]", - "not_motionblinds": "Discovered device is not a Motion gateway" + "not_motionblinds": "Discovered device is not a Motionblinds gateway" } }, "options": { diff --git a/homeassistant/components/motionblinds_ble/strings.json b/homeassistant/components/motionblinds_ble/strings.json index d6532f12386..ec1fb080854 100644 --- a/homeassistant/components/motionblinds_ble/strings.json +++ b/homeassistant/components/motionblinds_ble/strings.json @@ -1,8 +1,8 @@ { "config": { "abort": { - "no_bluetooth_adapter": "No bluetooth adapter found", - "no_devices_found": "Could not find any bluetooth devices" + "no_bluetooth_adapter": "No Bluetooth adapter found", + "no_devices_found": "Could not find any Bluetooth devices" }, "error": { "could_not_find_motor": "Could not find a motor with that MAC code", diff --git a/homeassistant/components/mqtt/__init__.py b/homeassistant/components/mqtt/__init__.py index 6656afe2c8a..ae010bf18c9 100644 --- a/homeassistant/components/mqtt/__init__.py +++ 
b/homeassistant/components/mqtt/__init__.py @@ -13,7 +13,7 @@ import voluptuous as vol from homeassistant import config as conf_util from homeassistant.components import websocket_api from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_DISCOVERY, SERVICE_RELOAD +from homeassistant.const import CONF_DISCOVERY, CONF_PLATFORM, SERVICE_RELOAD from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.exceptions import ( ConfigValidationError, @@ -81,6 +81,7 @@ from .const import ( ENTRY_OPTION_FIELDS, MQTT_CONNECTION_STATE, TEMPLATE_ERRORS, + Platform, ) from .models import ( DATA_MQTT, @@ -293,6 +294,21 @@ async def async_check_config_schema( ) from exc +def _platforms_in_use(hass: HomeAssistant, entry: ConfigEntry) -> set[str | Platform]: + """Return a set of platforms in use.""" + domains: set[str | Platform] = { + entry.domain + for entry in er.async_entries_for_config_entry( + er.async_get(hass), entry.entry_id + ) + } + # Update with domains from subentries + for subentry in entry.subentries.values(): + components = subentry.data["components"].values() + domains.update(component[CONF_PLATFORM] for component in components) + return domains + + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the actions and websocket API for the MQTT component.""" @@ -434,12 +450,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: mqtt_data, conf = await _setup_client() platforms_used = platforms_from_config(mqtt_data.config) - platforms_used.update( - entry.domain - for entry in er.async_entries_for_config_entry( - er.async_get(hass), entry.entry_id - ) - ) + platforms_used.update(_platforms_in_use(hass, entry)) integration = async_get_loaded_integration(hass, DOMAIN) # Preload platforms we know we are going to use so # discovery can setup each platform synchronously diff --git a/homeassistant/components/mqtt/abbreviations.py b/homeassistant/components/mqtt/abbreviations.py index 2d73cc5865c..a9037a5f247 100644 --- a/homeassistant/components/mqtt/abbreviations.py +++ b/homeassistant/components/mqtt/abbreviations.py @@ -150,6 +150,7 @@ ABBREVIATIONS = { "pl_rst_pct": "payload_reset_percentage", "pl_rst_pr_mode": "payload_reset_preset_mode", "pl_stop": "payload_stop", + "pl_stop_tilt": "payload_stop_tilt", "pl_strt": "payload_start", "pl_ret": "payload_return_to_base", "pl_toff": "payload_turn_off", diff --git a/homeassistant/components/mqtt/client.py b/homeassistant/components/mqtt/client.py index d35b3db7518..f6f53599363 100644 --- a/homeassistant/components/mqtt/client.py +++ b/homeassistant/components/mqtt/client.py @@ -15,6 +15,7 @@ import socket import ssl import time from typing import TYPE_CHECKING, Any +from uuid import uuid4 import certifi @@ -292,7 +293,7 @@ class MqttClientSetup: """ # We don't import on the top because some integrations # should be able to optionally rely on MQTT. - import paho.mqtt.client as mqtt # pylint: disable=import-outside-toplevel + from paho.mqtt import client as mqtt # pylint: disable=import-outside-toplevel # pylint: disable-next=import-outside-toplevel from .async_client import AsyncMQTTClient @@ -309,9 +310,10 @@ class MqttClientSetup: clean_session = True if (client_id := config.get(CONF_CLIENT_ID)) is None: - # PAHO MQTT relies on the MQTT server to generate random client IDs. - # However, that feature is not mandatory so we generate our own. 
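In the mqtt client.py change here, the broker-generated client id is replaced with a locally generated one built from paho's private _base62 helper over a random UUID. For illustration only, a dependency-free equivalent producing a random 22-character base62 id could look like the sketch below; it is not the code the integration uses:

import secrets
import string

BASE62_ALPHABET = string.digits + string.ascii_uppercase + string.ascii_lowercase

def random_client_id(length: int = 22) -> str:
    """Return a random base62 client id, similar in shape to paho's _base62 output."""
    return "".join(secrets.choice(BASE62_ALPHABET) for _ in range(length))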
- client_id = None + # PAHO MQTT relies on the MQTT server to generate random client ID + # for protocol version 3.1, however, that feature is not mandatory + # so we generate our own. + client_id = mqtt._base62(uuid4().int, padding=22) # noqa: SLF001 transport: str = config.get(CONF_TRANSPORT, DEFAULT_TRANSPORT) self._client = AsyncMQTTClient( callback_api_version=mqtt.CallbackAPIVersion.VERSION2, @@ -1020,8 +1022,6 @@ class MQTT: Resubscribe to all topics we were subscribed to and publish birth message. """ - # pylint: disable-next=import-outside-toplevel - if reason_code.is_failure: # 24: Continue authentication # 25: Re-authenticate diff --git a/homeassistant/components/mqtt/config_flow.py b/homeassistant/components/mqtt/config_flow.py index ad188c50aa9..471b6d048a7 100644 --- a/homeassistant/components/mqtt/config_flow.py +++ b/homeassistant/components/mqtt/config_flow.py @@ -5,12 +5,15 @@ from __future__ import annotations import asyncio from collections import OrderedDict from collections.abc import Callable, Mapping +from copy import deepcopy +from dataclasses import dataclass from enum import IntEnum import logging import queue from ssl import PROTOCOL_TLS_CLIENT, SSLContext, SSLError from types import MappingProxyType -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, cast +from uuid import uuid4 from cryptography.hazmat.primitives.serialization import ( Encoding, @@ -24,26 +27,48 @@ import voluptuous as vol from homeassistant.components.file_upload import process_uploaded_file from homeassistant.components.hassio import AddonError, AddonManager, AddonState +from homeassistant.components.sensor import ( + CONF_STATE_CLASS, + DEVICE_CLASS_UNITS, + SensorDeviceClass, + SensorStateClass, +) +from homeassistant.components.switch import SwitchDeviceClass from homeassistant.config_entries import ( SOURCE_RECONFIGURE, ConfigEntry, ConfigFlow, ConfigFlowResult, + ConfigSubentryFlow, OptionsFlow, + SubentryFlowResult, ) from homeassistant.const import ( + ATTR_CONFIGURATION_URL, + ATTR_HW_VERSION, + ATTR_MODEL, + ATTR_MODEL_ID, + ATTR_NAME, + ATTR_SW_VERSION, CONF_CLIENT_ID, + CONF_DEVICE, + CONF_DEVICE_CLASS, CONF_DISCOVERY, CONF_HOST, + CONF_NAME, + CONF_OPTIMISTIC, CONF_PASSWORD, CONF_PAYLOAD, + CONF_PLATFORM, CONF_PORT, CONF_PROTOCOL, + CONF_UNIT_OF_MEASUREMENT, CONF_USERNAME, + CONF_VALUE_TEMPLATE, ) from homeassistant.core import HomeAssistant, callback -from homeassistant.data_entry_flow import AbortFlow -from homeassistant.helpers import config_validation as cv +from homeassistant.data_entry_flow import AbortFlow, SectionConfig, section +from homeassistant.helpers import config_validation as cv, entity_registry as er from homeassistant.helpers.hassio import is_hassio from homeassistant.helpers.json import json_dumps from homeassistant.helpers.selector import ( @@ -54,9 +79,12 @@ from homeassistant.helpers.selector import ( NumberSelectorConfig, NumberSelectorMode, SelectOptionDict, + Selector, SelectSelector, SelectSelectorConfig, SelectSelectorMode, + TemplateSelector, + TemplateSelectorConfig, TextSelector, TextSelectorConfig, TextSelectorType, @@ -71,13 +99,27 @@ from .const import ( ATTR_QOS, ATTR_RETAIN, ATTR_TOPIC, + CONF_AVAILABILITY_TEMPLATE, + CONF_AVAILABILITY_TOPIC, CONF_BIRTH_MESSAGE, CONF_BROKER, CONF_CERTIFICATE, CONF_CLIENT_CERT, CONF_CLIENT_KEY, + CONF_COMMAND_TEMPLATE, + CONF_COMMAND_TOPIC, CONF_DISCOVERY_PREFIX, + CONF_ENTITY_PICTURE, + CONF_EXPIRE_AFTER, CONF_KEEPALIVE, + CONF_LAST_RESET_VALUE_TEMPLATE, + CONF_OPTIONS, + 
CONF_PAYLOAD_AVAILABLE, + CONF_PAYLOAD_NOT_AVAILABLE, + CONF_QOS, + CONF_RETAIN, + CONF_STATE_TOPIC, + CONF_SUGGESTED_DISPLAY_PRECISION, CONF_TLS_INSECURE, CONF_TRANSPORT, CONF_WILL_MESSAGE, @@ -89,9 +131,12 @@ from .const import ( DEFAULT_DISCOVERY, DEFAULT_ENCODING, DEFAULT_KEEPALIVE, + DEFAULT_PAYLOAD_AVAILABLE, + DEFAULT_PAYLOAD_NOT_AVAILABLE, DEFAULT_PORT, DEFAULT_PREFIX, DEFAULT_PROTOCOL, + DEFAULT_QOS, DEFAULT_TRANSPORT, DEFAULT_WILL, DEFAULT_WS_PATH, @@ -99,12 +144,18 @@ from .const import ( SUPPORTED_PROTOCOLS, TRANSPORT_TCP, TRANSPORT_WEBSOCKETS, + Platform, ) +from .models import MqttAvailabilityData, MqttDeviceData, MqttSubentryData from .util import ( async_create_certificate_temp_files, get_file_path, + learn_more_url, valid_birth_will, valid_publish_topic, + valid_qos_schema, + valid_subscribe_topic, + valid_subscribe_topic_template, ) _LOGGER = logging.getLogger(__name__) @@ -128,10 +179,10 @@ PORT_SELECTOR = vol.All( vol.Coerce(int), ) PASSWORD_SELECTOR = TextSelector(TextSelectorConfig(type=TextSelectorType.PASSWORD)) -QOS_SELECTOR = vol.All( - NumberSelector(NumberSelectorConfig(mode=NumberSelectorMode.BOX, min=0, max=2)), - vol.Coerce(int), +QOS_SELECTOR = NumberSelector( + NumberSelectorConfig(mode=NumberSelectorMode.BOX, min=0, max=2) ) +QOS_DATA_SCHEMA = vol.All(QOS_SELECTOR, valid_qos_schema) KEEPALIVE_SELECTOR = vol.All( NumberSelector( NumberSelectorConfig( @@ -183,6 +234,243 @@ KEY_UPLOAD_SELECTOR = FileSelector( FileSelectorConfig(accept=".pem,.key,.der,.pk8,application/pkcs8") ) +# Subentry selectors +SUBENTRY_PLATFORMS = [Platform.NOTIFY, Platform.SENSOR, Platform.SWITCH] +SUBENTRY_PLATFORM_SELECTOR = SelectSelector( + SelectSelectorConfig( + options=[platform.value for platform in SUBENTRY_PLATFORMS], + mode=SelectSelectorMode.DROPDOWN, + translation_key=CONF_PLATFORM, + ) +) +TEMPLATE_SELECTOR = TemplateSelector(TemplateSelectorConfig()) + +SUBENTRY_AVAILABILITY_SCHEMA = vol.Schema( + { + vol.Optional(CONF_AVAILABILITY_TOPIC): TEXT_SELECTOR, + vol.Optional(CONF_AVAILABILITY_TEMPLATE): TEMPLATE_SELECTOR, + vol.Optional( + CONF_PAYLOAD_AVAILABLE, default=DEFAULT_PAYLOAD_AVAILABLE + ): TEXT_SELECTOR, + vol.Optional( + CONF_PAYLOAD_NOT_AVAILABLE, default=DEFAULT_PAYLOAD_NOT_AVAILABLE + ): TEXT_SELECTOR, + } +) + +# Sensor specific selectors +SENSOR_DEVICE_CLASS_SELECTOR = SelectSelector( + SelectSelectorConfig( + options=[device_class.value for device_class in SensorDeviceClass], + mode=SelectSelectorMode.DROPDOWN, + translation_key="device_class_sensor", + sort=True, + ) +) +SENSOR_STATE_CLASS_SELECTOR = SelectSelector( + SelectSelectorConfig( + options=[device_class.value for device_class in SensorStateClass], + mode=SelectSelectorMode.DROPDOWN, + translation_key=CONF_STATE_CLASS, + ) +) +OPTIONS_SELECTOR = SelectSelector( + SelectSelectorConfig( + options=[], + custom_value=True, + multiple=True, + ) +) +SUGGESTED_DISPLAY_PRECISION_SELECTOR = NumberSelector( + NumberSelectorConfig(mode=NumberSelectorMode.BOX, min=0, max=9) +) +EXPIRE_AFTER_SELECTOR = NumberSelector( + NumberSelectorConfig(mode=NumberSelectorMode.BOX, min=0) +) + +# Switch specific selectors +SWITCH_DEVICE_CLASS_SELECTOR = SelectSelector( + SelectSelectorConfig( + options=[device_class.value for device_class in SwitchDeviceClass], + mode=SelectSelectorMode.DROPDOWN, + translation_key="device_class_switch", + ) +) + + +@callback +def validate_sensor_platform_config( + config: dict[str, Any], +) -> dict[str, str]: + """Validate the sensor options, state and device class config.""" + errors: 
dict[str, str] = {} + # Only allow `options` to be set for `enum` sensors + # to limit the possible sensor values + if config.get(CONF_OPTIONS) is not None: + if config.get(CONF_STATE_CLASS) or config.get(CONF_UNIT_OF_MEASUREMENT): + errors[CONF_OPTIONS] = "options_not_allowed_with_state_class_or_uom" + + if (device_class := config.get(CONF_DEVICE_CLASS)) != SensorDeviceClass.ENUM: + errors[CONF_DEVICE_CLASS] = "options_device_class_enum" + + if ( + (device_class := config.get(CONF_DEVICE_CLASS)) == SensorDeviceClass.ENUM + and errors is not None + and CONF_OPTIONS not in config + ): + errors[CONF_OPTIONS] = "options_with_enum_device_class" + + if ( + device_class in DEVICE_CLASS_UNITS + and (unit_of_measurement := config.get(CONF_UNIT_OF_MEASUREMENT)) is None + and errors is not None + ): + # Do not allow an empty unit of measurement in a subentry data flow + errors[CONF_UNIT_OF_MEASUREMENT] = "uom_required_for_device_class" + return errors + + if ( + device_class is not None + and device_class in DEVICE_CLASS_UNITS + and unit_of_measurement not in DEVICE_CLASS_UNITS[device_class] + ): + errors[CONF_UNIT_OF_MEASUREMENT] = "invalid_uom" + + return errors + + +@dataclass(frozen=True) +class PlatformField: + """Stores a platform config field schema, required flag and validator.""" + + selector: Selector[Any] | Callable[..., Selector[Any]] + required: bool + validator: Callable[..., Any] + error: str | None = None + default: str | int | vol.Undefined = vol.UNDEFINED + exclude_from_reconfig: bool = False + conditions: tuple[dict[str, Any], ...] | None = None + custom_filtering: bool = False + section: str | None = None + + +@callback +def unit_of_measurement_selector(user_data: dict[str, Any | None]) -> Selector: + """Return a context based unit of measurement selector.""" + if ( + user_data is None + or (device_class := user_data.get(CONF_DEVICE_CLASS)) is None + or device_class not in DEVICE_CLASS_UNITS + ): + return TEXT_SELECTOR + return SelectSelector( + SelectSelectorConfig( + options=[str(uom) for uom in DEVICE_CLASS_UNITS[device_class]], + sort=True, + custom_value=True, + ) + ) + + +COMMON_ENTITY_FIELDS = { + CONF_PLATFORM: PlatformField( + SUBENTRY_PLATFORM_SELECTOR, True, str, exclude_from_reconfig=True + ), + CONF_NAME: PlatformField(TEXT_SELECTOR, False, str, exclude_from_reconfig=True), + CONF_ENTITY_PICTURE: PlatformField(TEXT_SELECTOR, False, cv.url, "invalid_url"), +} + +PLATFORM_ENTITY_FIELDS = { + Platform.NOTIFY.value: {}, + Platform.SENSOR.value: { + CONF_DEVICE_CLASS: PlatformField(SENSOR_DEVICE_CLASS_SELECTOR, False, str), + CONF_STATE_CLASS: PlatformField(SENSOR_STATE_CLASS_SELECTOR, False, str), + CONF_UNIT_OF_MEASUREMENT: PlatformField( + unit_of_measurement_selector, False, str, custom_filtering=True + ), + CONF_SUGGESTED_DISPLAY_PRECISION: PlatformField( + SUGGESTED_DISPLAY_PRECISION_SELECTOR, + False, + cv.positive_int, + section="advanced_settings", + ), + CONF_OPTIONS: PlatformField( + OPTIONS_SELECTOR, + False, + cv.ensure_list, + conditions=({"device_class": "enum"},), + ), + }, + Platform.SWITCH.value: { + CONF_DEVICE_CLASS: PlatformField(SWITCH_DEVICE_CLASS_SELECTOR, False, str), + }, +} +PLATFORM_MQTT_FIELDS = { + Platform.NOTIFY.value: { + CONF_COMMAND_TOPIC: PlatformField( + TEXT_SELECTOR, True, valid_publish_topic, "invalid_publish_topic" + ), + CONF_COMMAND_TEMPLATE: PlatformField( + TEMPLATE_SELECTOR, False, cv.template, "invalid_template" + ), + CONF_RETAIN: PlatformField(BOOLEAN_SELECTOR, False, bool), + }, + Platform.SENSOR.value: { + 
CONF_STATE_TOPIC: PlatformField( + TEXT_SELECTOR, True, valid_subscribe_topic, "invalid_subscribe_topic" + ), + CONF_VALUE_TEMPLATE: PlatformField( + TEMPLATE_SELECTOR, False, cv.template, "invalid_template" + ), + CONF_LAST_RESET_VALUE_TEMPLATE: PlatformField( + TEMPLATE_SELECTOR, + False, + cv.template, + "invalid_template", + conditions=({CONF_STATE_CLASS: "total"},), + ), + CONF_EXPIRE_AFTER: PlatformField( + EXPIRE_AFTER_SELECTOR, False, cv.positive_int, section="advanced_settings" + ), + }, + Platform.SWITCH.value: { + CONF_COMMAND_TOPIC: PlatformField( + TEXT_SELECTOR, True, valid_publish_topic, "invalid_publish_topic" + ), + CONF_COMMAND_TEMPLATE: PlatformField( + TEMPLATE_SELECTOR, False, cv.template, "invalid_template" + ), + CONF_STATE_TOPIC: PlatformField( + TEXT_SELECTOR, False, valid_subscribe_topic, "invalid_subscribe_topic" + ), + CONF_VALUE_TEMPLATE: PlatformField( + TEMPLATE_SELECTOR, False, cv.template, "invalid_template" + ), + CONF_RETAIN: PlatformField(BOOLEAN_SELECTOR, False, bool), + CONF_OPTIMISTIC: PlatformField(BOOLEAN_SELECTOR, False, bool), + }, +} +ENTITY_CONFIG_VALIDATOR: dict[ + str, + Callable[[dict[str, Any]], dict[str, str]] | None, +] = { + Platform.NOTIFY.value: None, + Platform.SENSOR.value: validate_sensor_platform_config, + Platform.SWITCH.value: None, +} + +MQTT_DEVICE_PLATFORM_FIELDS = { + ATTR_NAME: PlatformField(TEXT_SELECTOR, False, str), + ATTR_SW_VERSION: PlatformField(TEXT_SELECTOR, False, str), + ATTR_HW_VERSION: PlatformField(TEXT_SELECTOR, False, str), + ATTR_MODEL: PlatformField(TEXT_SELECTOR, False, str), + ATTR_MODEL_ID: PlatformField(TEXT_SELECTOR, False, str), + ATTR_CONFIGURATION_URL: PlatformField(TEXT_SELECTOR, False, cv.url, "invalid_url"), + CONF_QOS: PlatformField( + QOS_SELECTOR, False, int, default=DEFAULT_QOS, section="mqtt_settings" + ), +} + REAUTH_SCHEMA = vol.Schema( { vol.Required(CONF_USERNAME): TEXT_SELECTOR, @@ -215,6 +503,170 @@ def update_password_from_user_input( return substituted_used_data +@callback +def validate_field( + field: str, + validator: Callable[..., Any], + user_input: dict[str, Any] | None, + errors: dict[str, str], + error: str, +) -> None: + """Validate a single field.""" + if user_input is None or field not in user_input: + return + try: + validator(user_input[field]) + except (ValueError, vol.Invalid): + errors[field] = error + + +@callback +def _check_conditions( + platform_field: PlatformField, component_data: dict[str, Any] | None = None +) -> bool: + """Only include field if one of conditions match, or no conditions are set.""" + if platform_field.conditions is None or component_data is None: + return True + return any( + all(component_data.get(key) == value for key, value in condition.items()) + for condition in platform_field.conditions + ) + + +@callback +def calculate_merged_config( + merged_user_input: dict[str, Any], + data_schema_fields: dict[str, PlatformField], + component_data: dict[str, Any], +) -> dict[str, Any]: + """Calculate merged config.""" + base_schema_fields = { + key + for key, platform_field in data_schema_fields.items() + if _check_conditions(platform_field, component_data) + } - set(merged_user_input) + return { + key: value + for key, value in component_data.items() + if key not in base_schema_fields + } | merged_user_input + + +@callback +def validate_user_input( + user_input: dict[str, Any], + data_schema_fields: dict[str, PlatformField], + *, + component_data: dict[str, Any] | None = None, + config_validator: Callable[[dict[str, Any]], dict[str, str]] | None = 
None, +) -> tuple[dict[str, Any], dict[str, str]]: + """Validate user input.""" + errors: dict[str, str] = {} + # Merge sections + merged_user_input: dict[str, Any] = {} + for key, value in user_input.items(): + if isinstance(value, dict): + merged_user_input.update(value) + else: + merged_user_input[key] = value + + for field, value in merged_user_input.items(): + validator = data_schema_fields[field].validator + try: + validator(value) + except (ValueError, vol.Invalid): + errors[field] = data_schema_fields[field].error or "invalid_input" + + if config_validator is not None: + if TYPE_CHECKING: + assert component_data is not None + + errors |= config_validator( + calculate_merged_config( + merged_user_input, data_schema_fields, component_data + ), + ) + + return merged_user_input, errors + + +@callback +def data_schema_from_fields( + data_schema_fields: dict[str, PlatformField], + reconfig: bool, + component_data: dict[str, Any] | None = None, + user_input: dict[str, Any] | None = None, + device_data: MqttDeviceData | None = None, +) -> vol.Schema: + """Generate custom data schema from platform fields or device data.""" + if device_data is not None: + component_data_with_user_input: dict[str, Any] | None = dict(device_data) + if TYPE_CHECKING: + assert component_data_with_user_input is not None + component_data_with_user_input.update( + component_data_with_user_input.pop("mqtt_settings", {}) + ) + else: + component_data_with_user_input = deepcopy(component_data) + if component_data_with_user_input is not None and user_input is not None: + component_data_with_user_input |= user_input + + sections: dict[str | None, None] = { + field_details.section: None for field_details in data_schema_fields.values() + } + data_schema: dict[Any, Any] = {} + all_data_element_options: set[Any] = set() + no_reconfig_options: set[Any] = set() + for schema_section in sections: + data_schema_element = { + vol.Required(field_name, default=field_details.default) + if field_details.required + else vol.Optional( + field_name, default=field_details.default + ): field_details.selector(component_data_with_user_input) # type: ignore[operator] + if field_details.custom_filtering + else field_details.selector + for field_name, field_details in data_schema_fields.items() + if field_details.section == schema_section + and (not field_details.exclude_from_reconfig or not reconfig) + and _check_conditions(field_details, component_data_with_user_input) + } + data_element_options = set(data_schema_element) + all_data_element_options |= data_element_options + no_reconfig_options |= { + field_name + for field_name, field_details in data_schema_fields.items() + if field_details.section == schema_section + and field_details.exclude_from_reconfig + } + if schema_section is None: + data_schema.update(data_schema_element) + continue + collapsed = ( + not any( + (default := data_schema_fields[str(option)].default) is vol.UNDEFINED + or component_data_with_user_input[str(option)] != default + for option in data_element_options + if option in component_data_with_user_input + ) + if component_data_with_user_input is not None + else True + ) + data_schema[vol.Optional(schema_section)] = section( + vol.Schema(data_schema_element), SectionConfig({"collapsed": collapsed}) + ) + + # Reset all fields from the component_data not in the schema + if component_data: + filtered_fields = ( + set(data_schema_fields) - all_data_element_options - no_reconfig_options + ) + for field in filtered_fields: + if field in component_data: + del 
component_data[field] + return vol.Schema(data_schema) + + class FlowHandler(ConfigFlow, domain=DOMAIN): """Handle a config flow.""" @@ -230,6 +682,14 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): self.install_task: asyncio.Task | None = None self.start_task: asyncio.Task | None = None + @classmethod + @callback + def async_get_supported_subentry_types( + cls, config_entry: ConfigEntry + ) -> dict[str, type[ConfigSubentryFlow]]: + """Return subentries supported by this handler.""" + return {CONF_DEVICE: MQTTSubentryFlowHandler} + @staticmethod @callback def async_get_options_flow( @@ -685,7 +1145,7 @@ class MQTTOptionsFlowHandler(OptionsFlow): "birth_payload", description={"suggested_value": birth[CONF_PAYLOAD]} ) ] = TEXT_SELECTOR - fields[vol.Optional("birth_qos", default=birth[ATTR_QOS])] = QOS_SELECTOR + fields[vol.Optional("birth_qos", default=birth[ATTR_QOS])] = QOS_DATA_SCHEMA fields[vol.Optional("birth_retain", default=birth[ATTR_RETAIN])] = ( BOOLEAN_SELECTOR ) @@ -708,7 +1168,7 @@ class MQTTOptionsFlowHandler(OptionsFlow): "will_payload", description={"suggested_value": will[CONF_PAYLOAD]} ) ] = TEXT_SELECTOR - fields[vol.Optional("will_qos", default=will[ATTR_QOS])] = QOS_SELECTOR + fields[vol.Optional("will_qos", default=will[ATTR_QOS])] = QOS_DATA_SCHEMA fields[vol.Optional("will_retain", default=will[ATTR_RETAIN])] = ( BOOLEAN_SELECTOR ) @@ -721,6 +1181,436 @@ class MQTTOptionsFlowHandler(OptionsFlow): ) +class MQTTSubentryFlowHandler(ConfigSubentryFlow): + """Handle MQTT subentry flow.""" + + _subentry_data: MqttSubentryData + _component_id: str | None = None + + @callback + def update_component_fields( + self, + data_schema_fields: dict[str, PlatformField], + merged_user_input: dict[str, Any], + ) -> None: + """Update the componment fields.""" + if TYPE_CHECKING: + assert self._component_id is not None + component_data = self._subentry_data["components"][self._component_id] + # Remove the fields from the component data + # if they are not in the schema and not in the user input + config = calculate_merged_config( + merged_user_input, data_schema_fields, component_data + ) + for field in ( + field + for field, platform_field in data_schema_fields.items() + if field in (set(component_data) - set(config)) + and not platform_field.exclude_from_reconfig + ): + component_data.pop(field) + component_data.update(merged_user_input) + + @callback + def generate_names(self) -> tuple[str, str]: + """Generate the device and full entity name.""" + if TYPE_CHECKING: + assert self._component_id is not None + device_name = self._subentry_data[CONF_DEVICE][CONF_NAME] + if entity_name := self._subentry_data["components"][self._component_id].get( + CONF_NAME + ): + full_entity_name: str = f"{device_name} {entity_name}" + else: + full_entity_name = device_name + return device_name, full_entity_name + + @callback + def get_suggested_values_from_component( + self, data_schema: vol.Schema + ) -> dict[str, Any]: + """Get suggestions from component data based on the data schema.""" + if TYPE_CHECKING: + assert self._component_id is not None + component_data = self._subentry_data["components"][self._component_id] + return { + field_key: self.get_suggested_values_from_component(value.schema) + if isinstance(value, section) + else component_data.get(field_key) + for field_key, value in data_schema.schema.items() + } + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """Add a subentry.""" + self._subentry_data = 
MqttSubentryData(device=MqttDeviceData(), components={}) + return await self.async_step_device() + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """Reconfigure a subentry.""" + reconfigure_subentry = self._get_reconfigure_subentry() + self._subentry_data = cast( + MqttSubentryData, deepcopy(dict(reconfigure_subentry.data)) + ) + return await self.async_step_summary_menu() + + async def async_step_device( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """Add a new MQTT device.""" + errors: dict[str, Any] = {} + device_data = self._subentry_data[CONF_DEVICE] + data_schema = data_schema_from_fields( + MQTT_DEVICE_PLATFORM_FIELDS, + device_data=device_data, + reconfig=True, + ) + if user_input is not None: + merged_user_input, errors = validate_user_input( + user_input, MQTT_DEVICE_PLATFORM_FIELDS + ) + if not errors: + self._subentry_data[CONF_DEVICE] = cast( + MqttDeviceData, merged_user_input + ) + if self.source == SOURCE_RECONFIGURE: + return await self.async_step_summary_menu() + return await self.async_step_entity() + data_schema = self.add_suggested_values_to_schema( + data_schema, device_data if user_input is None else user_input + ) + return self.async_show_form( + step_id=CONF_DEVICE, + data_schema=data_schema, + errors=errors, + last_step=False, + ) + + async def async_step_entity( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """Add or edit an mqtt entity.""" + errors: dict[str, str] = {} + data_schema_fields = COMMON_ENTITY_FIELDS + entity_name_label: str = "" + platform_label: str = "" + component_data: dict[str, Any] | None = None + if reconfig := (self._component_id is not None): + component_data = self._subentry_data["components"][self._component_id] + name: str | None = component_data.get(CONF_NAME) + platform_label = f"{self._subentry_data['components'][self._component_id][CONF_PLATFORM]} " + entity_name_label = f" ({name})" if name is not None else "" + data_schema = data_schema_from_fields(data_schema_fields, reconfig=reconfig) + if user_input is not None: + merged_user_input, errors = validate_user_input( + user_input, data_schema_fields, component_data=component_data + ) + if not errors: + if self._component_id is None: + self._component_id = uuid4().hex + self._subentry_data["components"].setdefault(self._component_id, {}) + self.update_component_fields(data_schema_fields, merged_user_input) + return await self.async_step_entity_platform_config() + data_schema = self.add_suggested_values_to_schema(data_schema, user_input) + elif self.source == SOURCE_RECONFIGURE and self._component_id is not None: + data_schema = self.add_suggested_values_to_schema( + data_schema, + self.get_suggested_values_from_component(data_schema), + ) + device_name = self._subentry_data[CONF_DEVICE][CONF_NAME] + return self.async_show_form( + step_id="entity", + data_schema=data_schema, + description_placeholders={ + "mqtt_device": device_name, + "entity_name_label": entity_name_label, + "platform_label": platform_label, + }, + errors=errors, + last_step=False, + ) + + def _show_update_or_delete_form(self, step_id: str) -> SubentryFlowResult: + """Help selecting an entity to update or delete.""" + device_name = self._subentry_data[CONF_DEVICE][CONF_NAME] + entities = [ + SelectOptionDict( + value=key, + label=f"{device_name} {component_data.get(CONF_NAME, '-')}" + f" ({component_data[CONF_PLATFORM]})", + ) + for key, component_data in 
self._subentry_data["components"].items() + ] + data_schema = vol.Schema( + { + vol.Required("component"): SelectSelector( + SelectSelectorConfig( + options=entities, + mode=SelectSelectorMode.LIST, + ) + ) + } + ) + return self.async_show_form( + step_id=step_id, data_schema=data_schema, last_step=False + ) + + async def async_step_update_entity( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """Select the entity to update.""" + if user_input: + self._component_id = user_input["component"] + return await self.async_step_entity() + if len(self._subentry_data["components"]) == 1: + # Return first key + self._component_id = next(iter(self._subentry_data["components"])) + return await self.async_step_entity() + return self._show_update_or_delete_form("update_entity") + + async def async_step_delete_entity( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """Select the entity to delete.""" + if user_input: + del self._subentry_data["components"][user_input["component"]] + return await self.async_step_summary_menu() + return self._show_update_or_delete_form("delete_entity") + + async def async_step_entity_platform_config( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """Configure platform entity details.""" + if TYPE_CHECKING: + assert self._component_id is not None + component_data = self._subentry_data["components"][self._component_id] + platform = component_data[CONF_PLATFORM] + data_schema_fields = PLATFORM_ENTITY_FIELDS[platform] + errors: dict[str, str] = {} + + data_schema = data_schema_from_fields( + data_schema_fields, + reconfig=bool( + {field for field in data_schema_fields if field in component_data} + ), + component_data=component_data, + user_input=user_input, + ) + if not data_schema.schema: + return await self.async_step_mqtt_platform_config() + if user_input is not None: + # Test entity fields against the validator + merged_user_input, errors = validate_user_input( + user_input, + data_schema_fields, + component_data=component_data, + config_validator=ENTITY_CONFIG_VALIDATOR[platform], + ) + if not errors: + self.update_component_fields(data_schema_fields, merged_user_input) + return await self.async_step_mqtt_platform_config() + + data_schema = self.add_suggested_values_to_schema(data_schema, user_input) + else: + data_schema = self.add_suggested_values_to_schema( + data_schema, + self.get_suggested_values_from_component(data_schema), + ) + + device_name, full_entity_name = self.generate_names() + return self.async_show_form( + step_id="entity_platform_config", + data_schema=data_schema, + description_placeholders={ + "mqtt_device": device_name, + CONF_PLATFORM: platform, + "entity": full_entity_name, + "url": learn_more_url(platform), + } + | (user_input or {}), + errors=errors, + last_step=False, + ) + + async def async_step_mqtt_platform_config( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """Configure entity platform MQTT details.""" + errors: dict[str, str] = {} + if TYPE_CHECKING: + assert self._component_id is not None + component_data = self._subentry_data["components"][self._component_id] + platform = component_data[CONF_PLATFORM] + data_schema_fields = PLATFORM_MQTT_FIELDS[platform] + data_schema = data_schema_from_fields( + data_schema_fields, + reconfig=bool( + {field for field in data_schema_fields if field in component_data} + ), + component_data=component_data, + ) + if user_input is not None: + # Test entity fields against the validator + 
merged_user_input, errors = validate_user_input( + user_input, + data_schema_fields, + component_data=component_data, + config_validator=ENTITY_CONFIG_VALIDATOR[platform], + ) + if not errors: + self.update_component_fields(data_schema_fields, merged_user_input) + self._component_id = None + if self.source == SOURCE_RECONFIGURE: + return await self.async_step_summary_menu() + return self._async_create_subentry() + + data_schema = self.add_suggested_values_to_schema(data_schema, user_input) + else: + data_schema = self.add_suggested_values_to_schema( + data_schema, + self.get_suggested_values_from_component(data_schema), + ) + device_name, full_entity_name = self.generate_names() + return self.async_show_form( + step_id="mqtt_platform_config", + data_schema=data_schema, + description_placeholders={ + "mqtt_device": device_name, + CONF_PLATFORM: platform, + "entity": full_entity_name, + "url": learn_more_url(platform), + }, + errors=errors, + last_step=False, + ) + + @callback + def _async_create_subentry( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """Create a subentry for a new MQTT device.""" + device_name = self._subentry_data[CONF_DEVICE][CONF_NAME] + component_data: dict[str, Any] = next( + iter(self._subentry_data["components"].values()) + ) + platform = component_data[CONF_PLATFORM] + entity_name: str | None + if entity_name := component_data.get(CONF_NAME): + full_entity_name: str = f"{device_name} {entity_name}" + else: + full_entity_name = device_name + + return self.async_create_entry( + data=self._subentry_data, + title=self._subentry_data[CONF_DEVICE][CONF_NAME], + description_placeholders={ + "entity": full_entity_name, + CONF_PLATFORM: platform, + }, + ) + + async def async_step_availability( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """Configure availability options.""" + errors: dict[str, str] = {} + validate_field( + "availability_topic", + valid_subscribe_topic, + user_input, + errors, + "invalid_subscribe_topic", + ) + validate_field( + "availability_template", + valid_subscribe_topic_template, + user_input, + errors, + "invalid_template", + ) + if not errors and user_input is not None: + self._subentry_data.setdefault("availability", MqttAvailabilityData()) + self._subentry_data["availability"] = cast(MqttAvailabilityData, user_input) + return await self.async_step_summary_menu() + + data_schema = SUBENTRY_AVAILABILITY_SCHEMA + data_schema = self.add_suggested_values_to_schema( + data_schema, + dict(self._subentry_data.setdefault("availability", {})) + if self.source == SOURCE_RECONFIGURE + else user_input, + ) + return self.async_show_form( + step_id="availability", + data_schema=data_schema, + errors=errors, + last_step=False, + ) + + async def async_step_summary_menu( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """Show summary menu and decide to add more entities or to finish the flow.""" + self._component_id = None + mqtt_device = self._subentry_data[CONF_DEVICE][CONF_NAME] + mqtt_items = ", ".join( + f"{mqtt_device} {component_data.get(CONF_NAME, '-')} ({component_data[CONF_PLATFORM]})" + for component_data in self._subentry_data["components"].values() + ) + menu_options = [ + "entity", + "update_entity", + ] + if len(self._subentry_data["components"]) > 1: + menu_options.append("delete_entity") + menu_options.extend(["device", "availability"]) + if self._subentry_data != self._get_reconfigure_subentry().data: + menu_options.append("save_changes") + return 
self.async_show_menu( + step_id="summary_menu", + menu_options=menu_options, + description_placeholders={ + "mqtt_device": mqtt_device, + "mqtt_items": mqtt_items, + }, + ) + + async def async_step_save_changes( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """Save the changes made to the subentry.""" + entry = self._get_entry() + subentry = self._get_reconfigure_subentry() + entity_registry = er.async_get(self.hass) + + # When a component is removed from the MQTT device, + # And we save the changes to the subentry, + # we need to clean up stale entity registry entries. + # The component id is used as a part of the unique id of the entity. + for unique_id, platform in [ + ( + f"{subentry.subentry_id}_{component_id}", + subentry.data["components"][component_id][CONF_PLATFORM], + ) + for component_id in subentry.data["components"] + if component_id not in self._subentry_data["components"] + ]: + if entity_id := entity_registry.async_get_entity_id( + platform, DOMAIN, unique_id + ): + entity_registry.async_remove(entity_id) + + return self.async_update_and_abort( + entry, + subentry, + data=self._subentry_data, + title=self._subentry_data[CONF_DEVICE][CONF_NAME], + ) + + @callback def async_is_pem_data(data: bytes) -> bool: """Return True if data is in PEM format.""" diff --git a/homeassistant/components/mqtt/const.py b/homeassistant/components/mqtt/const.py index 007b3b7e576..b2fcd492435 100644 --- a/homeassistant/components/mqtt/const.py +++ b/homeassistant/components/mqtt/const.py @@ -56,20 +56,55 @@ CONF_SUPPORTED_FEATURES = "supported_features" CONF_ACTION_TEMPLATE = "action_template" CONF_ACTION_TOPIC = "action_topic" +CONF_BLUE_TEMPLATE = "blue_template" +CONF_BRIGHTNESS_COMMAND_TEMPLATE = "brightness_command_template" +CONF_BRIGHTNESS_COMMAND_TOPIC = "brightness_command_topic" +CONF_BRIGHTNESS_SCALE = "brightness_scale" +CONF_BRIGHTNESS_STATE_TOPIC = "brightness_state_topic" +CONF_BRIGHTNESS_TEMPLATE = "brightness_template" +CONF_BRIGHTNESS_VALUE_TEMPLATE = "brightness_value_template" +CONF_COLOR_MODE = "color_mode" +CONF_COLOR_MODE_STATE_TOPIC = "color_mode_state_topic" +CONF_COLOR_MODE_VALUE_TEMPLATE = "color_mode_value_template" +CONF_COLOR_TEMP_COMMAND_TEMPLATE = "color_temp_command_template" +CONF_COLOR_TEMP_COMMAND_TOPIC = "color_temp_command_topic" CONF_COLOR_TEMP_KELVIN = "color_temp_kelvin" +CONF_COLOR_TEMP_TEMPLATE = "color_temp_template" +CONF_COLOR_TEMP_STATE_TOPIC = "color_temp_state_topic" +CONF_COLOR_TEMP_VALUE_TEMPLATE = "color_temp_value_template" +CONF_COMMAND_OFF_TEMPLATE = "command_off_template" +CONF_COMMAND_ON_TEMPLATE = "command_on_template" CONF_CURRENT_HUMIDITY_TEMPLATE = "current_humidity_template" CONF_CURRENT_HUMIDITY_TOPIC = "current_humidity_topic" CONF_CURRENT_TEMP_TEMPLATE = "current_temperature_template" CONF_CURRENT_TEMP_TOPIC = "current_temperature_topic" CONF_ENABLED_BY_DEFAULT = "enabled_by_default" +CONF_EFFECT_COMMAND_TEMPLATE = "effect_command_template" +CONF_EFFECT_COMMAND_TOPIC = "effect_command_topic" +CONF_EFFECT_LIST = "effect_list" +CONF_EFFECT_STATE_TOPIC = "effect_state_topic" +CONF_EFFECT_TEMPLATE = "effect_template" +CONF_EFFECT_VALUE_TEMPLATE = "effect_value_template" CONF_ENTITY_PICTURE = "entity_picture" +CONF_EXPIRE_AFTER = "expire_after" +CONF_FLASH_TIME_LONG = "flash_time_long" +CONF_FLASH_TIME_SHORT = "flash_time_short" +CONF_GREEN_TEMPLATE = "green_template" +CONF_HS_COMMAND_TEMPLATE = "hs_command_template" +CONF_HS_COMMAND_TOPIC = "hs_command_topic" +CONF_HS_STATE_TOPIC = "hs_state_topic" 
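The save_changes step above removes entity registry entries for components that were deleted from the subentry; the unique id is composed from the subentry id and the component id, and the lookup is done per entity platform. A condensed sketch of that cleanup, assuming component dicts keyed by id with a "platform" field (function name and signature are illustrative, not part of the diff):

from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

def remove_stale_entities(
    hass: HomeAssistant,
    subentry_id: str,
    old_components: dict[str, dict],
    new_components: dict[str, dict],
) -> None:
    """Remove registry entries whose component no longer exists in the subentry."""
    registry = er.async_get(hass)
    for component_id, component in old_components.items():
        if component_id in new_components:
            continue
        unique_id = f"{subentry_id}_{component_id}"
        # Same lookup as the diff: entity domain, then the mqtt integration, then unique id
        if entity_id := registry.async_get_entity_id(component["platform"], "mqtt", unique_id):
            registry.async_remove(entity_id)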
+CONF_HS_VALUE_TEMPLATE = "hs_value_template" +CONF_LAST_RESET_VALUE_TEMPLATE = "last_reset_value_template" CONF_MAX_KELVIN = "max_kelvin" +CONF_MAX_MIREDS = "max_mireds" CONF_MIN_KELVIN = "min_kelvin" +CONF_MIN_MIREDS = "min_mireds" CONF_MODE_COMMAND_TEMPLATE = "mode_command_template" CONF_MODE_COMMAND_TOPIC = "mode_command_topic" CONF_MODE_LIST = "modes" CONF_MODE_STATE_TEMPLATE = "mode_state_template" CONF_MODE_STATE_TOPIC = "mode_state_topic" +CONF_ON_COMMAND_TYPE = "on_command_type" CONF_PAYLOAD_CLOSE = "payload_close" CONF_PAYLOAD_OPEN = "payload_open" CONF_PAYLOAD_STOP = "payload_stop" @@ -78,10 +113,25 @@ CONF_POSITION_OPEN = "position_open" CONF_POWER_COMMAND_TOPIC = "power_command_topic" CONF_POWER_COMMAND_TEMPLATE = "power_command_template" CONF_PRECISION = "precision" +CONF_RED_TEMPLATE = "red_template" +CONF_RGB_COMMAND_TEMPLATE = "rgb_command_template" +CONF_RGB_COMMAND_TOPIC = "rgb_command_topic" +CONF_RGB_STATE_TOPIC = "rgb_state_topic" +CONF_RGB_VALUE_TEMPLATE = "rgb_value_template" +CONF_RGBW_COMMAND_TEMPLATE = "rgbw_command_template" +CONF_RGBW_COMMAND_TOPIC = "rgbw_command_topic" +CONF_RGBW_STATE_TOPIC = "rgbw_state_topic" +CONF_RGBW_VALUE_TEMPLATE = "rgbw_value_template" +CONF_RGBWW_COMMAND_TEMPLATE = "rgbww_command_template" +CONF_RGBWW_COMMAND_TOPIC = "rgbww_command_topic" +CONF_RGBWW_STATE_TOPIC = "rgbww_state_topic" +CONF_RGBWW_VALUE_TEMPLATE = "rgbww_value_template" CONF_STATE_CLOSED = "state_closed" CONF_STATE_CLOSING = "state_closing" CONF_STATE_OPEN = "state_open" CONF_STATE_OPENING = "state_opening" +CONF_SUGGESTED_DISPLAY_PRECISION = "suggested_display_precision" +CONF_SUPPORTED_COLOR_MODES = "supported_color_modes" CONF_TEMP_COMMAND_TEMPLATE = "temperature_command_template" CONF_TEMP_COMMAND_TOPIC = "temperature_command_topic" CONF_TEMP_STATE_TEMPLATE = "temperature_state_template" @@ -89,7 +139,14 @@ CONF_TEMP_STATE_TOPIC = "temperature_state_topic" CONF_TEMP_INITIAL = "initial" CONF_TEMP_MAX = "max_temp" CONF_TEMP_MIN = "min_temp" +CONF_XY_COMMAND_TEMPLATE = "xy_command_template" +CONF_XY_COMMAND_TOPIC = "xy_command_topic" +CONF_XY_STATE_TOPIC = "xy_state_topic" +CONF_XY_VALUE_TEMPLATE = "xy_value_template" +CONF_WHITE_COMMAND_TOPIC = "white_command_topic" +CONF_WHITE_SCALE = "white_scale" +# Config flow constants CONF_CERTIFICATE = "certificate" CONF_CLIENT_KEY = "client_key" CONF_CLIENT_CERT = "client_cert" @@ -110,15 +167,23 @@ CONF_CONFIGURATION_URL = "configuration_url" CONF_OBJECT_ID = "object_id" CONF_SUPPORT_URL = "support_url" +DEFAULT_BRIGHTNESS = False +DEFAULT_BRIGHTNESS_SCALE = 255 DEFAULT_PREFIX = "homeassistant" DEFAULT_BIRTH_WILL_TOPIC = DEFAULT_PREFIX + "/status" DEFAULT_DISCOVERY = True +DEFAULT_EFFECT = False DEFAULT_ENCODING = "utf-8" +DEFAULT_FLASH_TIME_LONG = 10 +DEFAULT_FLASH_TIME_SHORT = 2 DEFAULT_OPTIMISTIC = False +DEFAULT_ON_COMMAND_TYPE = "last" DEFAULT_QOS = 0 DEFAULT_PAYLOAD_AVAILABLE = "online" DEFAULT_PAYLOAD_CLOSE = "CLOSE" DEFAULT_PAYLOAD_NOT_AVAILABLE = "offline" +DEFAULT_PAYLOAD_OFF = "OFF" +DEFAULT_PAYLOAD_ON = "ON" DEFAULT_PAYLOAD_OPEN = "OPEN" DEFAULT_PORT = 1883 DEFAULT_RETAIN = False @@ -127,6 +192,7 @@ DEFAULT_WS_PATH = "/" DEFAULT_POSITION_CLOSED = 0 DEFAULT_POSITION_OPEN = 100 DEFAULT_RETAIN = False +DEFAULT_WHITE_SCALE = 255 PROTOCOL_31 = "3.1" PROTOCOL_311 = "3.1.1" diff --git a/homeassistant/components/mqtt/cover.py b/homeassistant/components/mqtt/cover.py index c93fdd9c760..428c4d0e205 100644 --- a/homeassistant/components/mqtt/cover.py +++ b/homeassistant/components/mqtt/cover.py @@ -81,6 +81,7 @@ 
CONF_TILT_STATUS_TOPIC = "tilt_status_topic" CONF_TILT_STATUS_TEMPLATE = "tilt_status_template" CONF_STATE_STOPPED = "state_stopped" +CONF_PAYLOAD_STOP_TILT = "payload_stop_tilt" CONF_TILT_CLOSED_POSITION = "tilt_closed_value" CONF_TILT_MAX = "tilt_max" CONF_TILT_MIN = "tilt_min" @@ -203,6 +204,9 @@ _PLATFORM_SCHEMA_BASE = MQTT_BASE_SCHEMA.extend( vol.Optional(CONF_VALUE_TEMPLATE): cv.template, vol.Optional(CONF_GET_POSITION_TEMPLATE): cv.template, vol.Optional(CONF_TILT_COMMAND_TEMPLATE): cv.template, + vol.Optional(CONF_PAYLOAD_STOP_TILT, default=DEFAULT_PAYLOAD_STOP): vol.Any( + cv.string, None + ), } ).extend(MQTT_ENTITY_COMMON_SCHEMA.schema) @@ -592,6 +596,12 @@ class MqttCover(MqttEntity, CoverEntity): self._attr_current_cover_tilt_position = tilt_percentage self.async_write_ha_state() + async def async_stop_cover_tilt(self, **kwargs: Any) -> None: + """Stop moving the cover tilt.""" + await self.async_publish_with_config( + self._config[CONF_TILT_COMMAND_TOPIC], self._config[CONF_PAYLOAD_STOP_TILT] + ) + async def async_set_cover_position(self, **kwargs: Any) -> None: """Move the cover to a specific position.""" position_percentage = kwargs[ATTR_POSITION] diff --git a/homeassistant/components/mqtt/entity.py b/homeassistant/components/mqtt/entity.py index fb047cc8d5e..8446f9041c9 100644 --- a/homeassistant/components/mqtt/entity.py +++ b/homeassistant/components/mqtt/entity.py @@ -43,7 +43,7 @@ from homeassistant.helpers.dispatcher import ( async_dispatcher_send, ) from homeassistant.helpers.entity import Entity, async_generate_entity_id -from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.event import ( async_track_device_registry_updated_event, async_track_entity_registry_updated_event, @@ -111,6 +111,7 @@ from .discovery import ( from .models import ( DATA_MQTT, MessageCallbackType, + MqttSubentryData, MqttValueTemplate, MqttValueTemplateException, PublishPayloadType, @@ -122,7 +123,7 @@ from .subscription import ( async_subscribe_topics_internal, async_unsubscribe_topics, ) -from .util import mqtt_config_entry_enabled +from .util import learn_more_url, mqtt_config_entry_enabled _LOGGER = logging.getLogger(__name__) @@ -238,7 +239,7 @@ def async_setup_entity_entry_helper( entry: ConfigEntry, entity_class: type[MqttEntity] | None, domain: str, - async_add_entities: AddEntitiesCallback, + async_add_entities: AddConfigEntryEntitiesCallback, discovery_schema: VolSchemaType, platform_schema_modern: VolSchemaType, schema_class_mapping: dict[str, type[MqttEntity]] | None = None, @@ -282,11 +283,10 @@ def async_setup_entity_entry_helper( @callback def _async_setup_entities() -> None: - """Set up MQTT items from configuration.yaml.""" + """Set up MQTT items from subentries and configuration.yaml.""" nonlocal entity_class mqtt_data = hass.data[DATA_MQTT] - if not (config_yaml := mqtt_data.config): - return + config_yaml = mqtt_data.config yaml_configs: list[ConfigType] = [ config for config_item in config_yaml @@ -294,6 +294,45 @@ def async_setup_entity_entry_helper( for config in configs if config_domain == domain ] + # process subentry entity setup + for config_subentry_id, subentry in entry.subentries.items(): + subentry_data = cast(MqttSubentryData, subentry.data) + availability_config = subentry_data.get("availability", {}) + subentry_entities: list[Entity] = [] + device_config = subentry_data["device"].copy() + device_mqtt_options = 
device_config.pop("mqtt_settings", {}) + device_config["identifiers"] = config_subentry_id + for component_id, component_data in subentry_data["components"].items(): + if component_data["platform"] != domain: + continue + component_config: dict[str, Any] = component_data.copy() + component_config[CONF_UNIQUE_ID] = ( + f"{config_subentry_id}_{component_id}" + ) + component_config[CONF_DEVICE] = device_config + component_config.pop("platform") + component_config.update(availability_config) + component_config.update(device_mqtt_options) + + try: + config = platform_schema_modern(component_config) + if schema_class_mapping is not None: + entity_class = schema_class_mapping[config[CONF_SCHEMA]] + if TYPE_CHECKING: + assert entity_class is not None + subentry_entities.append(entity_class(hass, config, entry, None)) + except vol.Invalid as exc: + _LOGGER.error( + "Schema violation occurred when trying to set up " + "entity from subentry %s %s %s: %s", + config_subentry_id, + subentry.title, + subentry.data, + exc, + ) + + async_add_entities(subentry_entities, config_subentry_id=config_subentry_id) + entities: list[Entity] = [] for yaml_config in yaml_configs: try: @@ -309,9 +348,6 @@ def async_setup_entity_entry_helper( line = getattr(yaml_config, "__line__", "?") issue_id = hex(hash(frozenset(yaml_config))) yaml_config_str = yaml_dump(yaml_config) - learn_more_url = ( - f"https://www.home-assistant.io/integrations/{domain}.mqtt/" - ) async_create_issue( hass, DOMAIN, @@ -319,7 +355,7 @@ def async_setup_entity_entry_helper( issue_domain=domain, is_fixable=False, severity=IssueSeverity.ERROR, - learn_more_url=learn_more_url, + learn_more_url=learn_more_url(domain), translation_placeholders={ "domain": domain, "config_file": config_file, diff --git a/homeassistant/components/mqtt/light/schema_basic.py b/homeassistant/components/mqtt/light/schema_basic.py index a2f424b247d..a950aced665 100644 --- a/homeassistant/components/mqtt/light/schema_basic.py +++ b/homeassistant/components/mqtt/light/schema_basic.py @@ -51,12 +51,58 @@ from homeassistant.util import color as color_util from .. 
import subscription from ..config import MQTT_RW_SCHEMA from ..const import ( + CONF_BRIGHTNESS_COMMAND_TEMPLATE, + CONF_BRIGHTNESS_COMMAND_TOPIC, + CONF_BRIGHTNESS_SCALE, + CONF_BRIGHTNESS_STATE_TOPIC, + CONF_BRIGHTNESS_VALUE_TEMPLATE, + CONF_COLOR_MODE_STATE_TOPIC, + CONF_COLOR_MODE_VALUE_TEMPLATE, + CONF_COLOR_TEMP_COMMAND_TEMPLATE, + CONF_COLOR_TEMP_COMMAND_TOPIC, CONF_COLOR_TEMP_KELVIN, + CONF_COLOR_TEMP_STATE_TOPIC, + CONF_COLOR_TEMP_VALUE_TEMPLATE, CONF_COMMAND_TOPIC, + CONF_EFFECT_COMMAND_TEMPLATE, + CONF_EFFECT_COMMAND_TOPIC, + CONF_EFFECT_LIST, + CONF_EFFECT_STATE_TOPIC, + CONF_EFFECT_VALUE_TEMPLATE, + CONF_HS_COMMAND_TEMPLATE, + CONF_HS_COMMAND_TOPIC, + CONF_HS_STATE_TOPIC, + CONF_HS_VALUE_TEMPLATE, CONF_MAX_KELVIN, + CONF_MAX_MIREDS, CONF_MIN_KELVIN, + CONF_MIN_MIREDS, + CONF_ON_COMMAND_TYPE, + CONF_RGB_COMMAND_TEMPLATE, + CONF_RGB_COMMAND_TOPIC, + CONF_RGB_STATE_TOPIC, + CONF_RGB_VALUE_TEMPLATE, + CONF_RGBW_COMMAND_TEMPLATE, + CONF_RGBW_COMMAND_TOPIC, + CONF_RGBW_STATE_TOPIC, + CONF_RGBW_VALUE_TEMPLATE, + CONF_RGBWW_COMMAND_TEMPLATE, + CONF_RGBWW_COMMAND_TOPIC, + CONF_RGBWW_STATE_TOPIC, + CONF_RGBWW_VALUE_TEMPLATE, CONF_STATE_TOPIC, CONF_STATE_VALUE_TEMPLATE, + CONF_WHITE_COMMAND_TOPIC, + CONF_WHITE_SCALE, + CONF_XY_COMMAND_TEMPLATE, + CONF_XY_COMMAND_TOPIC, + CONF_XY_STATE_TOPIC, + CONF_XY_VALUE_TEMPLATE, + DEFAULT_BRIGHTNESS_SCALE, + DEFAULT_ON_COMMAND_TYPE, + DEFAULT_PAYLOAD_OFF, + DEFAULT_PAYLOAD_ON, + DEFAULT_WHITE_SCALE, PAYLOAD_NONE, ) from ..entity import MqttEntity @@ -74,47 +120,7 @@ from .schema import MQTT_LIGHT_SCHEMA_SCHEMA _LOGGER = logging.getLogger(__name__) -CONF_BRIGHTNESS_COMMAND_TEMPLATE = "brightness_command_template" -CONF_BRIGHTNESS_COMMAND_TOPIC = "brightness_command_topic" -CONF_BRIGHTNESS_SCALE = "brightness_scale" -CONF_BRIGHTNESS_STATE_TOPIC = "brightness_state_topic" -CONF_BRIGHTNESS_VALUE_TEMPLATE = "brightness_value_template" -CONF_COLOR_MODE_STATE_TOPIC = "color_mode_state_topic" -CONF_COLOR_MODE_VALUE_TEMPLATE = "color_mode_value_template" -CONF_COLOR_TEMP_COMMAND_TEMPLATE = "color_temp_command_template" -CONF_COLOR_TEMP_COMMAND_TOPIC = "color_temp_command_topic" -CONF_COLOR_TEMP_STATE_TOPIC = "color_temp_state_topic" -CONF_COLOR_TEMP_VALUE_TEMPLATE = "color_temp_value_template" -CONF_EFFECT_COMMAND_TEMPLATE = "effect_command_template" -CONF_EFFECT_COMMAND_TOPIC = "effect_command_topic" -CONF_EFFECT_LIST = "effect_list" -CONF_EFFECT_STATE_TOPIC = "effect_state_topic" -CONF_EFFECT_VALUE_TEMPLATE = "effect_value_template" -CONF_HS_COMMAND_TEMPLATE = "hs_command_template" -CONF_HS_COMMAND_TOPIC = "hs_command_topic" -CONF_HS_STATE_TOPIC = "hs_state_topic" -CONF_HS_VALUE_TEMPLATE = "hs_value_template" -CONF_MAX_MIREDS = "max_mireds" -CONF_MIN_MIREDS = "min_mireds" -CONF_RGB_COMMAND_TEMPLATE = "rgb_command_template" -CONF_RGB_COMMAND_TOPIC = "rgb_command_topic" -CONF_RGB_STATE_TOPIC = "rgb_state_topic" -CONF_RGB_VALUE_TEMPLATE = "rgb_value_template" -CONF_RGBW_COMMAND_TEMPLATE = "rgbw_command_template" -CONF_RGBW_COMMAND_TOPIC = "rgbw_command_topic" -CONF_RGBW_STATE_TOPIC = "rgbw_state_topic" -CONF_RGBW_VALUE_TEMPLATE = "rgbw_value_template" -CONF_RGBWW_COMMAND_TEMPLATE = "rgbww_command_template" -CONF_RGBWW_COMMAND_TOPIC = "rgbww_command_topic" -CONF_RGBWW_STATE_TOPIC = "rgbww_state_topic" -CONF_RGBWW_VALUE_TEMPLATE = "rgbww_value_template" -CONF_XY_COMMAND_TEMPLATE = "xy_command_template" -CONF_XY_COMMAND_TOPIC = "xy_command_topic" -CONF_XY_STATE_TOPIC = "xy_state_topic" -CONF_XY_VALUE_TEMPLATE = "xy_value_template" -CONF_WHITE_COMMAND_TOPIC = 
"white_command_topic" -CONF_WHITE_SCALE = "white_scale" -CONF_ON_COMMAND_TYPE = "on_command_type" +DEFAULT_NAME = "MQTT LightEntity" MQTT_LIGHT_ATTRIBUTES_BLOCKED = frozenset( { @@ -137,13 +143,6 @@ MQTT_LIGHT_ATTRIBUTES_BLOCKED = frozenset( } ) -DEFAULT_BRIGHTNESS_SCALE = 255 -DEFAULT_NAME = "MQTT LightEntity" -DEFAULT_PAYLOAD_OFF = "OFF" -DEFAULT_PAYLOAD_ON = "ON" -DEFAULT_WHITE_SCALE = 255 -DEFAULT_ON_COMMAND_TYPE = "last" - VALUES_ON_COMMAND_TYPE = ["first", "last", "brightness"] COMMAND_TEMPLATE_KEYS = [ diff --git a/homeassistant/components/mqtt/light/schema_json.py b/homeassistant/components/mqtt/light/schema_json.py index d18da9e917a..a1f86278cf0 100644 --- a/homeassistant/components/mqtt/light/schema_json.py +++ b/homeassistant/components/mqtt/light/schema_json.py @@ -55,13 +55,26 @@ from homeassistant.util.json import json_loads_object from .. import subscription from ..config import DEFAULT_QOS, DEFAULT_RETAIN, MQTT_RW_SCHEMA from ..const import ( + CONF_COLOR_MODE, CONF_COLOR_TEMP_KELVIN, CONF_COMMAND_TOPIC, + CONF_EFFECT_LIST, + CONF_FLASH_TIME_LONG, + CONF_FLASH_TIME_SHORT, CONF_MAX_KELVIN, + CONF_MAX_MIREDS, CONF_MIN_KELVIN, + CONF_MIN_MIREDS, CONF_QOS, CONF_RETAIN, CONF_STATE_TOPIC, + CONF_SUPPORTED_COLOR_MODES, + DEFAULT_BRIGHTNESS, + DEFAULT_BRIGHTNESS_SCALE, + DEFAULT_EFFECT, + DEFAULT_FLASH_TIME_LONG, + DEFAULT_FLASH_TIME_SHORT, + DEFAULT_WHITE_SCALE, ) from ..entity import MqttEntity from ..models import ReceiveMessage @@ -78,25 +91,7 @@ _LOGGER = logging.getLogger(__name__) DOMAIN = "mqtt_json" -DEFAULT_BRIGHTNESS = False -DEFAULT_EFFECT = False -DEFAULT_FLASH_TIME_LONG = 10 -DEFAULT_FLASH_TIME_SHORT = 2 DEFAULT_NAME = "MQTT JSON Light" -DEFAULT_BRIGHTNESS_SCALE = 255 -DEFAULT_WHITE_SCALE = 255 - -CONF_COLOR_MODE = "color_mode" -CONF_SUPPORTED_COLOR_MODES = "supported_color_modes" - -CONF_EFFECT_LIST = "effect_list" - -CONF_FLASH_TIME_LONG = "flash_time_long" -CONF_FLASH_TIME_SHORT = "flash_time_short" - -CONF_MAX_MIREDS = "max_mireds" -CONF_MIN_MIREDS = "min_mireds" - _PLATFORM_SCHEMA_BASE = ( MQTT_RW_SCHEMA.extend( diff --git a/homeassistant/components/mqtt/light/schema_template.py b/homeassistant/components/mqtt/light/schema_template.py index 901cee6f14c..595f072416b 100644 --- a/homeassistant/components/mqtt/light/schema_template.py +++ b/homeassistant/components/mqtt/light/schema_template.py @@ -40,10 +40,21 @@ from homeassistant.util import color as color_util from .. 
import subscription from ..config import MQTT_RW_SCHEMA from ..const import ( + CONF_BLUE_TEMPLATE, + CONF_BRIGHTNESS_TEMPLATE, CONF_COLOR_TEMP_KELVIN, + CONF_COLOR_TEMP_TEMPLATE, + CONF_COMMAND_OFF_TEMPLATE, + CONF_COMMAND_ON_TEMPLATE, CONF_COMMAND_TOPIC, + CONF_EFFECT_LIST, + CONF_EFFECT_TEMPLATE, + CONF_GREEN_TEMPLATE, CONF_MAX_KELVIN, + CONF_MAX_MIREDS, CONF_MIN_KELVIN, + CONF_MIN_MIREDS, + CONF_RED_TEMPLATE, CONF_STATE_TOPIC, PAYLOAD_NONE, ) @@ -64,18 +75,6 @@ DOMAIN = "mqtt_template" DEFAULT_NAME = "MQTT Template Light" -CONF_BLUE_TEMPLATE = "blue_template" -CONF_BRIGHTNESS_TEMPLATE = "brightness_template" -CONF_COLOR_TEMP_TEMPLATE = "color_temp_template" -CONF_COMMAND_OFF_TEMPLATE = "command_off_template" -CONF_COMMAND_ON_TEMPLATE = "command_on_template" -CONF_EFFECT_LIST = "effect_list" -CONF_EFFECT_TEMPLATE = "effect_template" -CONF_GREEN_TEMPLATE = "green_template" -CONF_MAX_MIREDS = "max_mireds" -CONF_MIN_MIREDS = "min_mireds" -CONF_RED_TEMPLATE = "red_template" - COMMAND_TEMPLATES = (CONF_COMMAND_ON_TEMPLATE, CONF_COMMAND_OFF_TEMPLATE) VALUE_TEMPLATES = ( CONF_BLUE_TEMPLATE, diff --git a/homeassistant/components/mqtt/models.py b/homeassistant/components/mqtt/models.py index 34c1f304944..8a42797b0f2 100644 --- a/homeassistant/components/mqtt/models.py +++ b/homeassistant/components/mqtt/models.py @@ -420,5 +420,41 @@ class MqttComponentConfig: discovery_payload: MQTTDiscoveryPayload +class DeviceMqttOptions(TypedDict, total=False): + """Hold the shared MQTT specific options for an MQTT device.""" + + qos: int + + +class MqttDeviceData(TypedDict, total=False): + """Hold the data for an MQTT device.""" + + name: str + identifiers: str + configuration_url: str + sw_version: str + hw_version: str + model: str + model_id: str + mqtt_settings: DeviceMqttOptions + + +class MqttAvailabilityData(TypedDict, total=False): + """Hold the availability configuration for a device.""" + + availability_topic: str + availability_template: str + payload_available: str + payload_not_available: str + + +class MqttSubentryData(TypedDict, total=False): + """Hold the data for a MQTT subentry.""" + + device: MqttDeviceData + components: dict[str, dict[str, Any]] + availability: MqttAvailabilityData + + DATA_MQTT: HassKey[MqttData] = HassKey("mqtt") DATA_MQTT_AVAILABLE: HassKey[asyncio.Future[bool]] = HassKey("mqtt_client_available") diff --git a/homeassistant/components/mqtt/sensor.py b/homeassistant/components/mqtt/sensor.py index 4d67b0d56e6..b27ef68368a 100644 --- a/homeassistant/components/mqtt/sensor.py +++ b/homeassistant/components/mqtt/sensor.py @@ -41,7 +41,15 @@ from homeassistant.util import dt as dt_util from . 
import subscription from .config import MQTT_RO_SCHEMA -from .const import CONF_OPTIONS, CONF_STATE_TOPIC, DOMAIN, PAYLOAD_NONE +from .const import ( + CONF_EXPIRE_AFTER, + CONF_LAST_RESET_VALUE_TEMPLATE, + CONF_OPTIONS, + CONF_STATE_TOPIC, + CONF_SUGGESTED_DISPLAY_PRECISION, + DOMAIN, + PAYLOAD_NONE, +) from .entity import MqttAvailabilityMixin, MqttEntity, async_setup_entity_entry_helper from .models import MqttValueTemplate, PayloadSentinel, ReceiveMessage from .schemas import MQTT_ENTITY_COMMON_SCHEMA @@ -51,10 +59,6 @@ _LOGGER = logging.getLogger(__name__) PARALLEL_UPDATES = 0 -CONF_EXPIRE_AFTER = "expire_after" -CONF_LAST_RESET_VALUE_TEMPLATE = "last_reset_value_template" -CONF_SUGGESTED_DISPLAY_PRECISION = "suggested_display_precision" - MQTT_SENSOR_ATTRIBUTES_BLOCKED = frozenset( { sensor.ATTR_LAST_RESET, diff --git a/homeassistant/components/mqtt/strings.json b/homeassistant/components/mqtt/strings.json index 4eb41b9e39a..60339347f2a 100644 --- a/homeassistant/components/mqtt/strings.json +++ b/homeassistant/components/mqtt/strings.json @@ -108,6 +108,189 @@ "invalid_inclusion": "The client certificate and private key must be configured together" } }, + "config_subentries": { + "device": { + "initiate_flow": { + "user": "Add MQTT Device", + "reconfigure": "Reconfigure MQTT Device" + }, + "entry_type": "MQTT Device", + "step": { + "availability": { + "title": "Availability options", + "description": "The availability feature allows a device to report its availability.", + "data": { + "availability_topic": "Availability topic", + "availability_template": "Availability template", + "payload_available": "Payload available", + "payload_not_available": "Payload not available" + }, + "data_description": { + "availability_topic": "Topic to receive the availability payload on", + "availability_template": "A [template](https://www.home-assistant.io/docs/configuration/templating/#using-templates-with-the-mqtt-integration) to render the availability payload received on the availability topic", + "payload_available": "The payload that indicates the device is available (defaults to 'online')", + "payload_not_available": "The payload that indicates the device is not available (defaults to 'offline')" + } + }, + "device": { + "title": "Configure MQTT device details", + "description": "Enter the MQTT device details:", + "data": { + "name": "[%key:common::config_flow::data::name%]", + "configuration_url": "Configuration URL", + "sw_version": "Software version", + "hw_version": "Hardware version", + "model": "Model", + "model_id": "Model ID" + }, + "data_description": { + "name": "The name of the manually added MQTT device.", + "configuration_url": "A link to the webpage that can manage the configuration of this device. Can be an 'http://', 'https://', or an internal 'homeassistant://' URL.", + "sw_version": "The software version of the device. E.g. '2025.1.0'.", + "hw_version": "The hardware version of the device. E.g. 'v1.0 rev a'.", + "model": "E.g. 'Cleanmaster Pro'.", + "model_id": "E.g. '123NK2PRO'." + }, + "sections": { + "mqtt_settings": { + "name": "MQTT Settings", + "data": { + "qos": "QoS" + }, + "data_description": { + "qos": "The Quality of Service value the device's entities should use." 
+ } + } + } + }, + "summary_menu": { + "title": "Reconfigure \"{mqtt_device}\"", + "description": "Entities set up:\n{mqtt_items}\n\nDecide what to do next:", + "menu_options": { + "entity": "Add another entity to \"{mqtt_device}\"", + "update_entity": "Update entity properties", + "delete_entity": "Delete an entity", + "availability": "Configure availability", + "device": "Update device properties", + "save_changes": "Save changes" + } + }, + "entity": { + "title": "Configure MQTT device \"{mqtt_device}\"", + "description": "Configure the basic {platform_label}entity settings{entity_name_label}", + "data": { + "platform": "Type of entity", + "name": "Entity name", + "entity_picture": "Entity picture" + }, + "data_description": { + "platform": "The type of the entity to configure.", + "name": "The name of the entity. Leave empty to set it to `None` to [mark it as the main feature of the MQTT device](https://www.home-assistant.io/integrations/mqtt/#naming-of-mqtt-entities).", + "entity_picture": "A URL to a picture to be assigned." + } + }, + "delete_entity": { + "title": "Delete entity", + "description": "Delete an entity. The entity will be removed from the device. Removing an entity will break any automations or scripts that depend on it.", + "data": { + "component": "Entity" + }, + "data_description": { + "component": "Select the entity you want to delete. At least one entity is required." + } + }, + "update_entity": { + "title": "Select entity", + "description": "Select the entity you want to update", + "data": { + "component": "Entity" + }, + "data_description": { + "component": "Select the entity you want to update." + } + }, + "entity_platform_config": { + "title": "Configure MQTT device \"{mqtt_device}\"", + "description": "Please configure specific details for {platform} entity \"{entity}\":", + "data": { + "device_class": "Device class", + "state_class": "State class", + "unit_of_measurement": "Unit of measurement", + "options": "Add option" + }, + "data_description": { + "device_class": "The device class of the {platform} entity. [Learn more.]({url}#device_class)", + "state_class": "The [state_class](https://developers.home-assistant.io/docs/core/entity/sensor/#available-state-classes) of the sensor. [Learn more.]({url}#state_class)", + "unit_of_measurement": "Defines the unit of measurement of the sensor, if any.", + "options": "Options for allowed sensor state values. The sensor’s device_class must be set to Enumeration. The 'Options' setting cannot be used together with State Class or Unit of measurement." + }, + "sections": { + "advanced_settings": { + "name": "Advanced options", + "data": { + "suggested_display_precision": "Suggested display precision" + }, + "data_description": { + "suggested_display_precision": "The number of decimals which should be used in the {platform} entity state after rounding. [Learn more.]({url}#suggested_display_precision)" + } + } + } + }, + "mqtt_platform_config": { + "title": "Configure MQTT device \"{mqtt_device}\"", + "description": "Please configure MQTT specific details for {platform} entity \"{entity}\":", + "data": { + "command_topic": "Command topic", + "command_template": "Command template", + "state_topic": "State topic", + "value_template": "Value template", + "last_reset_value_template": "Last reset value template", + "force_update": "Force update", + "optimistic": "Optimistic", + "retain": "Retain" + }, + "data_description": { + "command_topic": "The publishing topic that will be used to control the {platform} entity. 
[Learn more.]({url}#command_topic)", + "command_template": "A [template](https://www.home-assistant.io/docs/configuration/templating/#using-command-templates-with-mqtt) to render the payload to be published at the command topic.", + "state_topic": "The MQTT topic subscribed to receive {platform} state values. [Learn more.]({url}#state_topic)", + "value_template": "Defines a [template](https://www.home-assistant.io/docs/configuration/templating/#using-value-templates-with-mqtt) to extract the {platform} entity value.", + "last_reset_value_template": "Defines a [template](https://www.home-assistant.io/docs/configuration/templating/#using-value-templates-with-mqtt) to extract the last reset. When Last reset template is set, the State class option must be Total. [Learn more.]({url}#last_reset_value_template)", + "force_update": "Sends update events even if the value hasn’t changed. Useful if you want to have meaningful value graphs in history. [Learn more.]({url}#force_update)", + "optimistic": "Flag that defines if the {platform} entity works in optimistic mode. [Learn more.]({url}#optimistic)", + "retain": "Select if values published by the {platform} entity should be retained at the MQTT broker." + }, + "sections": { + "advanced_settings": { + "name": "Advanced settings", + "data": { + "expire_after": "Expire after" + }, + "data_description": { + "expire_after": "If set, it defines the number of seconds after which the sensor’s state expires if it’s not updated. After expiry, the sensor’s state becomes unavailable. If not set, the sensor's state never expires. [Learn more.]({url}#expire_after)" + } + } + } + } + }, + "abort": { + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" + }, + "create_entry": { + "default": "MQTT device with {platform} entity \"{entity}\" was set up successfully.\n\nNote that you can reconfigure the MQTT device at any time, e.g. to add more entities." + }, + "error": { + "invalid_input": "Invalid value", + "invalid_subscribe_topic": "Invalid subscribe topic", + "invalid_template": "Invalid template", + "invalid_uom": "The unit of measurement \"{unit_of_measurement}\" is not supported by the selected device class. Please either remove the device class, select a device class which supports \"{unit_of_measurement}\", or pick a supported unit of measurement from the list", + "invalid_url": "Invalid URL", + "options_not_allowed_with_state_class_or_uom": "The 'Options' setting is not allowed when state class or unit of measurement are used", + "options_device_class_enum": "The 'Options' setting must be used with the Enumeration device class. 
If you continue, the existing options will be reset", + "options_with_enum_device_class": "Configure options for the enumeration sensor", + "uom_required_for_device_class": "The selected device class requires a unit" + } + } + }, "device_automation": { "trigger_type": { "button_short_press": "\"{subtype}\" pressed", @@ -221,12 +404,92 @@ } }, "selector": { + "device_class_sensor": { + "options": { + "apparent_power": "[%key:component::sensor::entity_component::apparent_power::name%]", + "area": "[%key:component::sensor::entity_component::area::name%]", + "aqi": "[%key:component::sensor::entity_component::aqi::name%]", + "atmospheric_pressure": "[%key:component::sensor::entity_component::atmospheric_pressure::name%]", + "battery": "[%key:component::sensor::entity_component::battery::name%]", + "blood_glucose_concentration": "[%key:component::sensor::entity_component::blood_glucose_concentration::name%]", + "carbon_dioxide": "[%key:component::sensor::entity_component::carbon_dioxide::name%]", + "carbon_monoxide": "[%key:component::sensor::entity_component::carbon_monoxide::name%]", + "conductivity": "[%key:component::sensor::entity_component::conductivity::name%]", + "current": "[%key:component::sensor::entity_component::current::name%]", + "data_rate": "[%key:component::sensor::entity_component::data_rate::name%]", + "data_size": "[%key:component::sensor::entity_component::data_size::name%]", + "date": "[%key:component::sensor::entity_component::date::name%]", + "distance": "[%key:component::sensor::entity_component::distance::name%]", + "duration": "[%key:component::sensor::entity_component::duration::name%]", + "energy": "[%key:component::sensor::entity_component::energy::name%]", + "energy_distance": "[%key:component::sensor::entity_component::energy_distance::name%]", + "energy_storage": "[%key:component::sensor::entity_component::energy_storage::name%]", + "enum": "Enumeration", + "frequency": "[%key:component::sensor::entity_component::frequency::name%]", + "gas": "[%key:component::sensor::entity_component::gas::name%]", + "humidity": "[%key:component::sensor::entity_component::humidity::name%]", + "illuminance": "[%key:component::sensor::entity_component::illuminance::name%]", + "irradiance": "[%key:component::sensor::entity_component::irradiance::name%]", + "moisture": "[%key:component::sensor::entity_component::moisture::name%]", + "monetary": "[%key:component::sensor::entity_component::monetary::name%]", + "nitrogen_dioxide": "[%key:component::sensor::entity_component::nitrogen_dioxide::name%]", + "nitrogen_monoxide": "[%key:component::sensor::entity_component::nitrogen_monoxide::name%]", + "nitrous_oxide": "[%key:component::sensor::entity_component::nitrous_oxide::name%]", + "ozone": "[%key:component::sensor::entity_component::ozone::name%]", + "ph": "[%key:component::sensor::entity_component::ph::name%]", + "pm1": "[%key:component::sensor::entity_component::pm1::name%]", + "pm10": "[%key:component::sensor::entity_component::pm10::name%]", + "pm25": "[%key:component::sensor::entity_component::pm25::name%]", + "power": "[%key:component::sensor::entity_component::power::name%]", + "power_factor": "[%key:component::sensor::entity_component::power_factor::name%]", + "precipitation": "[%key:component::sensor::entity_component::precipitation::name%]", + "precipitation_intensity": "[%key:component::sensor::entity_component::precipitation_intensity::name%]", + "pressure": "[%key:component::sensor::entity_component::pressure::name%]", + "reactive_power": 
"[%key:component::sensor::entity_component::reactive_power::name%]", + "signal_strength": "[%key:component::sensor::entity_component::signal_strength::name%]", + "sound_pressure": "[%key:component::sensor::entity_component::sound_pressure::name%]", + "speed": "[%key:component::sensor::entity_component::speed::name%]", + "sulphur_dioxide": "[%key:component::sensor::entity_component::sulphur_dioxide::name%]", + "temperature": "[%key:component::sensor::entity_component::temperature::name%]", + "timestamp": "[%key:component::sensor::entity_component::timestamp::name%]", + "volatile_organic_compounds": "[%key:component::sensor::entity_component::volatile_organic_compounds::name%]", + "volatile_organic_compounds_parts": "[%key:component::sensor::entity_component::volatile_organic_compounds::name%]", + "voltage": "[%key:component::sensor::entity_component::voltage::name%]", + "volume": "[%key:component::sensor::entity_component::volume::name%]", + "volume_flow_rate": "[%key:component::sensor::entity_component::volume_flow_rate::name%]", + "volume_storage": "[%key:component::sensor::entity_component::volume_storage::name%]", + "water": "[%key:component::sensor::entity_component::water::name%]", + "weight": "[%key:component::sensor::entity_component::weight::name%]", + "wind_direction": "[%key:component::sensor::entity_component::wind_direction::name%]", + "wind_speed": "[%key:component::sensor::entity_component::wind_speed::name%]" + } + }, + "device_class_switch": { + "options": { + "outlet": "[%key:component::switch::entity_component::outlet::name%]", + "switch": "[%key:component::switch::title%]" + } + }, + "platform": { + "options": { + "notify": "Notify", + "sensor": "Sensor", + "switch": "Switch" + } + }, "set_ca_cert": { "options": { "off": "[%key:common::state::off%]", "auto": "Auto", "custom": "Custom" } + }, + "state_class": { + "options": { + "measurement": "[%key:component::sensor::entity_component::_::state_attributes::state_class::state::measurement%]", + "total": "[%key:component::sensor::entity_component::_::state_attributes::state_class::state::total%]", + "total_increasing": "[%key:component::sensor::entity_component::_::state_attributes::state_class::state::total_increasing%]" + } } }, "services": { @@ -236,7 +499,7 @@ "fields": { "evaluate_payload": { "name": "Evaluate payload", - "description": "When `payload` is a Python bytes literal, evaluate the bytes literal and publish the raw data." + "description": "If 'Payload' is a Python bytes literal, evaluate the bytes literal and publish the raw data." 
}, "topic": { "name": "Topic", diff --git a/homeassistant/components/mqtt/util.py b/homeassistant/components/mqtt/util.py index 27bdb4f2a35..e3996c80a8a 100644 --- a/homeassistant/components/mqtt/util.py +++ b/homeassistant/components/mqtt/util.py @@ -411,3 +411,9 @@ def migrate_certificate_file_to_content(file_name_or_auto: str) -> str | None: return certificate_file.read() except OSError: return None + + +@callback +def learn_more_url(platform: str) -> str: + """Return the URL for the platform specific MQTT documentation.""" + return f"https://www.home-assistant.io/integrations/{platform}.mqtt/" diff --git a/homeassistant/components/mullvad/config_flow.py b/homeassistant/components/mullvad/config_flow.py index c16f8879a7b..b179c5605ef 100644 --- a/homeassistant/components/mullvad/config_flow.py +++ b/homeassistant/components/mullvad/config_flow.py @@ -1,5 +1,6 @@ """Config flow for Mullvad VPN integration.""" +import logging from typing import Any from mullvad_api import MullvadAPI, MullvadAPIError @@ -8,6 +9,8 @@ from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from .const import DOMAIN +_LOGGER = logging.getLogger(__name__) + class MullvadConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Mullvad VPN.""" @@ -24,7 +27,8 @@ class MullvadConfigFlow(ConfigFlow, domain=DOMAIN): await self.hass.async_add_executor_job(MullvadAPI) except MullvadAPIError: errors["base"] = "cannot_connect" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: return self.async_create_entry(title="Mullvad VPN", data=user_input) diff --git a/homeassistant/components/mutesync/config_flow.py b/homeassistant/components/mutesync/config_flow.py index ef03df39968..a2aacfc927e 100644 --- a/homeassistant/components/mutesync/config_flow.py +++ b/homeassistant/components/mutesync/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations import asyncio +import logging from typing import Any import aiohttp @@ -16,6 +17,8 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN +_LOGGER = logging.getLogger(__name__) + STEP_USER_DATA_SCHEMA = vol.Schema({vol.Required("host"): str}) @@ -60,7 +63,8 @@ class MuteSyncConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" except InvalidAuth: errors["base"] = "invalid_auth" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: return self.async_create_entry( diff --git a/homeassistant/components/mysensors/sensor.py b/homeassistant/components/mysensors/sensor.py index 3a7101e6b39..3793bed8af2 100644 --- a/homeassistant/components/mysensors/sensor.py +++ b/homeassistant/components/mysensors/sensor.py @@ -102,6 +102,7 @@ SENSORS: dict[str, SensorEntityDescription] = { native_unit_of_measurement=DEGREE, icon="mdi:compass", device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, ), "V_WEIGHT": SensorEntityDescription( key="V_WEIGHT", diff --git a/homeassistant/components/mysensors/strings.json b/homeassistant/components/mysensors/strings.json index 30fe5f46d6b..1636cb076cc 100644 --- a/homeassistant/components/mysensors/strings.json +++ b/homeassistant/components/mysensors/strings.json @@ -21,16 +21,16 @@ "device": "IP address of the gateway", "tcp_port": "[%key:common::config_flow::data::port%]", "version": "MySensors version", - "persistence_file": "persistence file (leave empty to 
auto-generate)" + "persistence_file": "Persistence file (leave empty to auto-generate)" } }, "gw_serial": { "description": "Serial gateway setup", "data": { "device": "Serial port", - "baud_rate": "baud rate", + "baud_rate": "Baud rate", "version": "[%key:component::mysensors::config::step::gw_tcp::data::version%]", - "persistence_file": "Persistence file (leave empty to auto-generate)" + "persistence_file": "[%key:component::mysensors::config::step::gw_tcp::data::persistence_file%]" } }, "gw_mqtt": { @@ -40,7 +40,7 @@ "topic_in_prefix": "Prefix for input topics (topic_in_prefix)", "topic_out_prefix": "Prefix for output topics (topic_out_prefix)", "version": "[%key:component::mysensors::config::step::gw_tcp::data::version%]", - "persistence_file": "[%key:component::mysensors::config::step::gw_serial::data::persistence_file%]" + "persistence_file": "[%key:component::mysensors::config::step::gw_tcp::data::persistence_file%]" } } }, diff --git a/homeassistant/components/nam/__init__.py b/homeassistant/components/nam/__init__.py index 6b4ca6ff324..d297443c059 100644 --- a/homeassistant/components/nam/__init__.py +++ b/homeassistant/components/nam/__init__.py @@ -4,7 +4,7 @@ from __future__ import annotations import logging -from aiohttp.client_exceptions import ClientConnectorError, ClientError +from aiohttp.client_exceptions import ClientError from nettigo_air_monitor import ( ApiError, AuthFailedError, @@ -38,15 +38,27 @@ async def async_setup_entry(hass: HomeAssistant, entry: NAMConfigEntry) -> bool: options = ConnectionOptions(host=host, username=username, password=password) try: nam = await NettigoAirMonitor.create(websession, options) - except (ApiError, ClientError, ClientConnectorError, TimeoutError) as err: - raise ConfigEntryNotReady from err + except (ApiError, ClientError) as err: + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="device_communication_error", + translation_placeholders={"device": entry.title}, + ) from err try: await nam.async_check_credentials() - except ApiError as err: - raise ConfigEntryNotReady from err + except (ApiError, ClientError) as err: + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="device_communication_error", + translation_placeholders={"device": entry.title}, + ) from err except AuthFailedError as err: - raise ConfigEntryAuthFailed from err + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="auth_error", + translation_placeholders={"device": entry.title}, + ) from err coordinator = NAMDataUpdateCoordinator(hass, entry, nam) await coordinator.async_config_entry_first_refresh() diff --git a/homeassistant/components/nam/button.py b/homeassistant/components/nam/button.py index 60145e4fe27..791a5fdc27c 100644 --- a/homeassistant/components/nam/button.py +++ b/homeassistant/components/nam/button.py @@ -4,6 +4,9 @@ from __future__ import annotations import logging +from aiohttp.client_exceptions import ClientError +from nettigo_air_monitor import ApiError, AuthFailedError + from homeassistant.components.button import ( ButtonDeviceClass, ButtonEntity, @@ -11,9 +14,11 @@ from homeassistant.components.button import ( ) from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity +from .const import DOMAIN from .coordinator import 
NAMConfigEntry, NAMDataUpdateCoordinator PARALLEL_UPDATES = 1 @@ -59,4 +64,16 @@ class NAMButton(CoordinatorEntity[NAMDataUpdateCoordinator], ButtonEntity): async def async_press(self) -> None: """Triggers the restart.""" - await self.coordinator.nam.async_restart() + try: + await self.coordinator.nam.async_restart() + except (ApiError, ClientError) as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="device_communication_action_error", + translation_placeholders={ + "entity": self.entity_id, + "device": self.coordinator.config_entry.title, + }, + ) from err + except AuthFailedError: + self.coordinator.config_entry.async_start_reauth(self.hass) diff --git a/homeassistant/components/nam/const.py b/homeassistant/components/nam/const.py index 4b7b50b309a..2dedcf3c68a 100644 --- a/homeassistant/components/nam/const.py +++ b/homeassistant/components/nam/const.py @@ -11,6 +11,7 @@ SUFFIX_P1: Final = "_p1" SUFFIX_P2: Final = "_p2" SUFFIX_P4: Final = "_p4" +ATTR_BH1750_ILLUMINANCE: Final = "bh1750_illuminance" ATTR_BME280_HUMIDITY: Final = "bme280_humidity" ATTR_BME280_PRESSURE: Final = "bme280_pressure" ATTR_BME280_TEMPERATURE: Final = "bme280_temperature" diff --git a/homeassistant/components/nam/coordinator.py b/homeassistant/components/nam/coordinator.py index 3e2c9c24474..8a898dee378 100644 --- a/homeassistant/components/nam/coordinator.py +++ b/homeassistant/components/nam/coordinator.py @@ -64,6 +64,10 @@ class NAMDataUpdateCoordinator(DataUpdateCoordinator[NAMSensors]): # We do not need to catch AuthFailed exception here because sensor data is # always available without authorization. except (ApiError, InvalidSensorDataError, RetryError) as error: - raise UpdateFailed(error) from error + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error", + translation_placeholders={"device": self.config_entry.title}, + ) from error return data diff --git a/homeassistant/components/nam/manifest.json b/homeassistant/components/nam/manifest.json index c3a559de50b..1c3b9db7a86 100644 --- a/homeassistant/components/nam/manifest.json +++ b/homeassistant/components/nam/manifest.json @@ -7,7 +7,7 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["nettigo_air_monitor"], - "requirements": ["nettigo-air-monitor==4.0.0"], + "requirements": ["nettigo-air-monitor==4.1.0"], "zeroconf": [ { "type": "_http._tcp.local.", diff --git a/homeassistant/components/nam/sensor.py b/homeassistant/components/nam/sensor.py index 4478507dc59..45cfd313e8f 100644 --- a/homeassistant/components/nam/sensor.py +++ b/homeassistant/components/nam/sensor.py @@ -19,6 +19,7 @@ from homeassistant.components.sensor import ( from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, CONCENTRATION_PARTS_PER_MILLION, + LIGHT_LUX, PERCENTAGE, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, EntityCategory, @@ -33,6 +34,7 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from homeassistant.util.dt import utcnow from .const import ( + ATTR_BH1750_ILLUMINANCE, ATTR_BME280_HUMIDITY, ATTR_BME280_PRESSURE, ATTR_BME280_TEMPERATURE, @@ -83,6 +85,15 @@ class NAMSensorEntityDescription(SensorEntityDescription): SENSORS: tuple[NAMSensorEntityDescription, ...] 
= ( + NAMSensorEntityDescription( + key=ATTR_BH1750_ILLUMINANCE, + translation_key="bh1750_illuminance", + suggested_display_precision=0, + native_unit_of_measurement=LIGHT_LUX, + device_class=SensorDeviceClass.ILLUMINANCE, + state_class=SensorStateClass.MEASUREMENT, + value=lambda sensors: sensors.bh1750_illuminance, + ), NAMSensorEntityDescription( key=ATTR_BME280_HUMIDITY, translation_key="bme280_humidity", diff --git a/homeassistant/components/nam/strings.json b/homeassistant/components/nam/strings.json index 2caa4d8bd97..000dfe74112 100644 --- a/homeassistant/components/nam/strings.json +++ b/homeassistant/components/nam/strings.json @@ -54,6 +54,9 @@ }, "entity": { "sensor": { + "bh1750_illuminance": { + "name": "BH1750 illuminance" + }, "bme280_humidity": { "name": "BME280 humidity" }, @@ -98,6 +101,17 @@ "medium": "Medium", "high": "High", "very_high": "Very high" + }, + "state_attributes": { + "options": { + "state": { + "very_low": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::very_low%]", + "low": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::low%]", + "medium": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::medium%]", + "high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::high%]", + "very_high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::very_high%]" + } + } } }, "pmsx003_pm1": { @@ -120,6 +134,17 @@ "medium": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::medium%]", "high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::high%]", "very_high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::very_high%]" + }, + "state_attributes": { + "options": { + "state": { + "very_low": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::very_low%]", + "low": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::low%]", + "medium": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::medium%]", + "high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::high%]", + "very_high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::very_high%]" + } + } } }, "sds011_pm10": { @@ -145,6 +170,17 @@ "medium": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::medium%]", "high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::high%]", "very_high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::very_high%]" + }, + "state_attributes": { + "options": { + "state": { + "very_low": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::very_low%]", + "low": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::low%]", + "medium": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::medium%]", + "high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::high%]", + "very_high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::very_high%]" + } + } } }, "sps30_pm1": { @@ -169,5 +205,19 @@ "name": "Last restart" } } + }, + "exceptions": { + "auth_error": { + "message": "Authentication failed for {device}, please update your credentials" + }, + "device_communication_error": { + "message": "An error occurred while communicating with {device}" + }, + "device_communication_action_error": { + "message": "An error occurred while calling action for {entity} for {device}" + }, + "update_error": { + "message": "An error occurred while retrieving data from {device}" + } } } diff 
--git a/homeassistant/components/nasweb/config_flow.py b/homeassistant/components/nasweb/config_flow.py index 3a9ad3f7d49..298210903dc 100644 --- a/homeassistant/components/nasweb/config_flow.py +++ b/homeassistant/components/nasweb/config_flow.py @@ -103,7 +103,7 @@ class NASwebConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): errors["base"] = "missing_status" except AbortFlow: raise - except Exception: # pylint: disable=broad-except + except Exception: _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: diff --git a/homeassistant/components/nest/media_source.py b/homeassistant/components/nest/media_source.py index 146b6f2479e..a3d2901e911 100644 --- a/homeassistant/components/nest/media_source.py +++ b/homeassistant/components/nest/media_source.py @@ -20,8 +20,10 @@ from __future__ import annotations from collections.abc import Mapping from dataclasses import dataclass +import datetime import logging import os +import pathlib from typing import Any from google_nest_sdm.camera_traits import CameraClipPreviewTrait, CameraEventImageTrait @@ -46,6 +48,7 @@ from homeassistant.components.media_source import ( ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.event import async_track_time_interval from homeassistant.helpers.storage import Store from homeassistant.helpers.template import DATE_STR_FORMAT from homeassistant.util import dt as dt_util @@ -72,6 +75,9 @@ MEDIA_PATH = f"{DOMAIN}/event_media" # Size of small in-memory disk cache to avoid excessive disk reads DISK_READ_LRU_MAX_SIZE = 32 +# Remove orphaned media files that are older than this age +ORPHANED_MEDIA_AGE_CUTOFF = datetime.timedelta(days=7) + async def async_get_media_event_store( hass: HomeAssistant, subscriber: GoogleNestSubscriber @@ -123,6 +129,12 @@ class NestEventMediaStore(EventMediaStore): self._media_path = media_path self._data: dict[str, Any] | None = None self._devices: Mapping[str, str] | None = {} + # Invoke garbage collection for orphaned files once per day + async_track_time_interval( + hass, + self.async_remove_orphaned_media, + datetime.timedelta(days=1), + ) async def async_load(self) -> dict | None: """Load data.""" @@ -249,6 +261,68 @@ class NestEventMediaStore(EventMediaStore): devices[device.name] = device_entry.id return devices + async def async_remove_orphaned_media(self, now: datetime.datetime) -> None: + """Remove any media files that are orphaned and not referenced by the active event data. + + The event media store handles garbage collection, but there may be cases where files are + left around or unable to be removed. This is a scheduled event that will also check for + old orphaned files and remove them when the events are not referenced in the active list + of event data. + + Event media files are stored with the format <timestamp>-<event_type>.suffix. We extract + the list of valid timestamps from the event data and remove any files that are not in that list + or are older than the cutoff time. 
+ """ + _LOGGER.debug("Checking for orphaned media at %s", now) + + def _cleanup(event_timestamps: dict[str, set[int]]) -> None: + time_cutoff = (now - ORPHANED_MEDIA_AGE_CUTOFF).timestamp() + media_path = pathlib.Path(self._media_path) + for device_id, valid_timestamps in event_timestamps.items(): + media_files = list(media_path.glob(f"{device_id}/*")) + _LOGGER.debug("Found %d files (device=%s)", len(media_files), device_id) + for media_file in media_files: + if "-" not in media_file.name: + continue + try: + timestamp = int(media_file.name.split("-")[0]) + except ValueError: + continue + if timestamp in valid_timestamps or timestamp > time_cutoff: + continue + _LOGGER.debug("Removing orphaned media file: %s", media_file) + try: + os.remove(media_file) + except OSError as err: + _LOGGER.error( + "Unable to remove orphaned media file: %s %s", + media_file, + err, + ) + + # Nest device id mapped to home assistant device id + event_timestamps = await self._get_valid_event_timestamps() + await self._hass.async_add_executor_job(_cleanup, event_timestamps) + + async def _get_valid_event_timestamps(self) -> dict[str, set[int]]: + """Return a mapping of home assistant device id to valid timestamps.""" + device_map = await self._get_devices() + event_data = await self.async_load() or {} + valid_device_timestamps = {} + for nest_device_id, device_id in device_map.items(): + if (device_events := event_data.get(nest_device_id, {})) is None: + continue + valid_device_timestamps[device_id] = { + int( + datetime.datetime.fromisoformat( + camera_event["timestamp"] + ).timestamp() + ) + for events in device_events + for camera_event in events["events"].values() + } + return valid_device_timestamps + async def async_get_media_source(hass: HomeAssistant) -> MediaSource: """Set up Nest media source.""" diff --git a/homeassistant/components/netatmo/config_flow.py b/homeassistant/components/netatmo/config_flow.py index d853694ffea..02d9c2fa3a6 100644 --- a/homeassistant/components/netatmo/config_flow.py +++ b/homeassistant/components/netatmo/config_flow.py @@ -135,7 +135,7 @@ class NetatmoOptionsFlowHandler(OptionsFlow): vol.Optional( CONF_WEATHER_AREAS, default=weather_areas, - ): cv.multi_select({wa: None for wa in weather_areas}), + ): cv.multi_select(dict.fromkeys(weather_areas)), vol.Optional(CONF_NEW_AREA): str, } ) diff --git a/homeassistant/components/netatmo/sensor.py b/homeassistant/components/netatmo/sensor.py index 5f8084d542c..56b8233912f 100644 --- a/homeassistant/components/netatmo/sensor.py +++ b/homeassistant/components/netatmo/sensor.py @@ -213,7 +213,8 @@ SENSOR_TYPES: tuple[NetatmoSensorEntityDescription, ...] = ( netatmo_name="wind_angle", entity_registry_enabled_default=False, native_unit_of_measurement=DEGREE, - state_class=SensorStateClass.MEASUREMENT, + state_class=SensorStateClass.MEASUREMENT_ANGLE, + device_class=SensorDeviceClass.WIND_DIRECTION, ), NetatmoSensorEntityDescription( key="windstrength", @@ -235,7 +236,8 @@ SENSOR_TYPES: tuple[NetatmoSensorEntityDescription, ...] 
= ( netatmo_name="gust_angle", entity_registry_enabled_default=False, native_unit_of_measurement=DEGREE, - state_class=SensorStateClass.MEASUREMENT, + state_class=SensorStateClass.MEASUREMENT_ANGLE, + device_class=SensorDeviceClass.WIND_DIRECTION, ), NetatmoSensorEntityDescription( key="guststrength", @@ -345,7 +347,8 @@ PUBLIC_WEATHER_STATION_TYPES: tuple[ key="windangle_value", entity_registry_enabled_default=False, native_unit_of_measurement=DEGREE, - state_class=SensorStateClass.MEASUREMENT, + state_class=SensorStateClass.MEASUREMENT_ANGLE, + device_class=SensorDeviceClass.WIND_DIRECTION, value_fn=lambda area: area.get_latest_wind_angles(), ), NetatmoPublicWeatherSensorEntityDescription( @@ -360,7 +363,8 @@ PUBLIC_WEATHER_STATION_TYPES: tuple[ translation_key="gust_angle", entity_registry_enabled_default=False, native_unit_of_measurement=DEGREE, - state_class=SensorStateClass.MEASUREMENT, + state_class=SensorStateClass.MEASUREMENT_ANGLE, + device_class=SensorDeviceClass.WIND_DIRECTION, value_fn=lambda area: area.get_latest_gust_angles(), ), NetatmoPublicWeatherSensorEntityDescription( diff --git a/homeassistant/components/nexia/manifest.json b/homeassistant/components/nexia/manifest.json index 09b79d37c55..e7ab63d4712 100644 --- a/homeassistant/components/nexia/manifest.json +++ b/homeassistant/components/nexia/manifest.json @@ -12,5 +12,5 @@ "documentation": "https://www.home-assistant.io/integrations/nexia", "iot_class": "cloud_polling", "loggers": ["nexia"], - "requirements": ["nexia==2.2.2"] + "requirements": ["nexia==2.4.0"] } diff --git a/homeassistant/components/nexia/strings.json b/homeassistant/components/nexia/strings.json index 05d86d3a495..43da2cf05c7 100644 --- a/homeassistant/components/nexia/strings.json +++ b/homeassistant/components/nexia/strings.json @@ -86,8 +86,8 @@ } }, "set_hvac_run_mode": { - "name": "Set hvac run mode", - "description": "Sets the HVAC operation mode.", + "name": "Set HVAC run mode", + "description": "Sets the run and/or operation mode of the HVAC system.", "fields": { "run_mode": { "name": "Run mode", diff --git a/homeassistant/components/nextcloud/strings.json b/homeassistant/components/nextcloud/strings.json index 9b22a6924bc..ef4e3de0f62 100644 --- a/homeassistant/components/nextcloud/strings.json +++ b/homeassistant/components/nextcloud/strings.json @@ -88,7 +88,7 @@ "name": "Cache start time" }, "nextcloud_cache_ttl": { - "name": "Cache ttl" + "name": "Cache TTL" }, "nextcloud_database_size": { "name": "Database size" @@ -268,13 +268,13 @@ "name": "Updates available" }, "nextcloud_system_cpuload_1": { - "name": "CPU Load last 1 minute" + "name": "CPU load last 1 minute" }, "nextcloud_system_cpuload_15": { - "name": "CPU Load last 15 minutes" + "name": "CPU load last 15 minutes" }, "nextcloud_system_cpuload_5": { - "name": "CPU Load last 5 minutes" + "name": "CPU load last 5 minutes" }, "nextcloud_system_freespace": { "name": "Free space" diff --git a/homeassistant/components/nextdns/__init__.py b/homeassistant/components/nextdns/__init__.py index 478ff215c30..eb8bd26cb9b 100644 --- a/homeassistant/components/nextdns/__init__.py +++ b/homeassistant/components/nextdns/__init__.py @@ -36,6 +36,7 @@ from .const import ( ATTR_SETTINGS, ATTR_STATUS, CONF_PROFILE_ID, + DOMAIN, UPDATE_INTERVAL_ANALYTICS, UPDATE_INTERVAL_CONNECTION, UPDATE_INTERVAL_SETTINGS, @@ -88,9 +89,20 @@ async def async_setup_entry(hass: HomeAssistant, entry: NextDnsConfigEntry) -> b try: nextdns = await NextDns.create(websession, api_key) except (ApiError, 
ClientConnectorError, RetryError, TimeoutError) as err: - raise ConfigEntryNotReady from err + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="cannot_connect", + translation_placeholders={ + "entry": entry.title, + "error": repr(err), + }, + ) from err except InvalidApiKeyError as err: - raise ConfigEntryAuthFailed from err + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="auth_error", + translation_placeholders={"entry": entry.title}, + ) from err tasks = [] coordinators = {} diff --git a/homeassistant/components/nextdns/button.py b/homeassistant/components/nextdns/button.py index b36c243a463..2adccaa304f 100644 --- a/homeassistant/components/nextdns/button.py +++ b/homeassistant/components/nextdns/button.py @@ -2,15 +2,19 @@ from __future__ import annotations -from nextdns import AnalyticsStatus +from aiohttp import ClientError +from aiohttp.client_exceptions import ClientConnectorError +from nextdns import AnalyticsStatus, ApiError, InvalidApiKeyError from homeassistant.components.button import ButtonEntity, ButtonEntityDescription from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from . import NextDnsConfigEntry +from .const import DOMAIN from .coordinator import NextDnsUpdateCoordinator PARALLEL_UPDATES = 1 @@ -53,4 +57,21 @@ class NextDnsButton( async def async_press(self) -> None: """Trigger cleaning logs.""" - await self.coordinator.nextdns.clear_logs(self.coordinator.profile_id) + try: + await self.coordinator.nextdns.clear_logs(self.coordinator.profile_id) + except ( + ApiError, + ClientConnectorError, + TimeoutError, + ClientError, + ) as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="method_error", + translation_placeholders={ + "entity": self.entity_id, + "error": repr(err), + }, + ) from err + except InvalidApiKeyError: + self.coordinator.config_entry.async_start_reauth(self.hass) diff --git a/homeassistant/components/nextdns/config_flow.py b/homeassistant/components/nextdns/config_flow.py index d3327c4c08b..d36064d8fb0 100644 --- a/homeassistant/components/nextdns/config_flow.py +++ b/homeassistant/components/nextdns/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Mapping +import logging from typing import Any from aiohttp.client_exceptions import ClientConnectorError @@ -19,6 +20,8 @@ from .const import CONF_PROFILE_ID, DOMAIN AUTH_SCHEMA = vol.Schema({vol.Required(CONF_API_KEY): str}) +_LOGGER = logging.getLogger(__name__) + async def async_init_nextdns(hass: HomeAssistant, api_key: str) -> NextDns: """Check if credentials are valid.""" @@ -51,7 +54,8 @@ class NextDnsFlowHandler(ConfigFlow, domain=DOMAIN): errors["base"] = "invalid_api_key" except (ApiError, ClientConnectorError, RetryError, TimeoutError): errors["base"] = "cannot_connect" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: return await self.async_step_profiles() @@ -111,7 +115,8 @@ class NextDnsFlowHandler(ConfigFlow, domain=DOMAIN): errors["base"] = "invalid_api_key" except (ApiError, ClientConnectorError, RetryError, TimeoutError): errors["base"] = "cannot_connect" - except Exception: # noqa: BLE001 + except Exception: + 
_LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: return self.async_update_reload_and_abort( diff --git a/homeassistant/components/nextdns/coordinator.py b/homeassistant/components/nextdns/coordinator.py index 850702e4488..41f6ff43a2a 100644 --- a/homeassistant/components/nextdns/coordinator.py +++ b/homeassistant/components/nextdns/coordinator.py @@ -79,9 +79,20 @@ class NextDnsUpdateCoordinator(DataUpdateCoordinator[CoordinatorDataT]): ClientConnectorError, RetryError, ) as err: - raise UpdateFailed(err) from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error", + translation_placeholders={ + "entry": self.config_entry.title, + "error": repr(err), + }, + ) from err except InvalidApiKeyError as err: - raise ConfigEntryAuthFailed from err + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="auth_error", + translation_placeholders={"entry": self.config_entry.title}, + ) from err async def _async_update_data_internal(self) -> CoordinatorDataT: """Update data via library.""" diff --git a/homeassistant/components/nextdns/strings.json b/homeassistant/components/nextdns/strings.json index f2a5fa2816d..38944a0711e 100644 --- a/homeassistant/components/nextdns/strings.json +++ b/homeassistant/components/nextdns/strings.json @@ -359,5 +359,19 @@ "name": "Force YouTube restricted mode" } } + }, + "exceptions": { + "auth_error": { + "message": "Authentication failed for {entry}, please update your API key" + }, + "cannot_connect": { + "message": "An error occurred while connecting to the NextDNS API for {entry}: {error}" + }, + "method_error": { + "message": "An error occurred while calling the NextDNS API method for {entity}: {error}" + }, + "update_error": { + "message": "An error occurred while retrieving data from the NextDNS API for {entry}: {error}" + } } } diff --git a/homeassistant/components/nextdns/switch.py b/homeassistant/components/nextdns/switch.py index b7c77bd9dbd..8bdca76b955 100644 --- a/homeassistant/components/nextdns/switch.py +++ b/homeassistant/components/nextdns/switch.py @@ -8,7 +8,7 @@ from typing import Any from aiohttp import ClientError from aiohttp.client_exceptions import ClientConnectorError -from nextdns import ApiError, Settings +from nextdns import ApiError, InvalidApiKeyError, Settings from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription from homeassistant.const import EntityCategory @@ -18,6 +18,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from . 
import NextDnsConfigEntry +from .const import DOMAIN from .coordinator import NextDnsUpdateCoordinator PARALLEL_UPDATES = 1 @@ -582,9 +583,16 @@ class NextDnsSwitch( ClientError, ) as err: raise HomeAssistantError( - "NextDNS API returned an error calling set_setting for" - f" {self.entity_id}: {err}" + translation_domain=DOMAIN, + translation_key="method_error", + translation_placeholders={ + "entity": self.entity_id, + "error": repr(err), + }, ) from err + except InvalidApiKeyError: + self.coordinator.config_entry.async_start_reauth(self.hass) + return if result: self._attr_is_on = new_state diff --git a/homeassistant/components/nibe_heatpump/manifest.json b/homeassistant/components/nibe_heatpump/manifest.json index 049ba905f04..a8441fb90d8 100644 --- a/homeassistant/components/nibe_heatpump/manifest.json +++ b/homeassistant/components/nibe_heatpump/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/nibe_heatpump", "iot_class": "local_polling", - "requirements": ["nibe==2.14.0"] + "requirements": ["nibe==2.17.0"] } diff --git a/homeassistant/components/nibe_heatpump/sensor.py b/homeassistant/components/nibe_heatpump/sensor.py index ac4f9eba308..54cd0f7ea34 100644 --- a/homeassistant/components/nibe_heatpump/sensor.py +++ b/homeassistant/components/nibe_heatpump/sensor.py @@ -13,14 +13,17 @@ from homeassistant.components.sensor import ( ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( + PERCENTAGE, EntityCategory, UnitOfElectricCurrent, UnitOfElectricPotential, UnitOfEnergy, UnitOfFrequency, UnitOfPower, + UnitOfPressure, UnitOfTemperature, UnitOfTime, + UnitOfVolumeFlowRate, ) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback @@ -114,6 +117,20 @@ UNIT_DESCRIPTIONS = { state_class=SensorStateClass.TOTAL_INCREASING, native_unit_of_measurement=UnitOfTime.HOURS, ), + "min": SensorEntityDescription( + key="min", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=SensorDeviceClass.DURATION, + state_class=SensorStateClass.TOTAL_INCREASING, + native_unit_of_measurement=UnitOfTime.MINUTES, + ), + "s": SensorEntityDescription( + key="s", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=SensorDeviceClass.DURATION, + state_class=SensorStateClass.TOTAL_INCREASING, + native_unit_of_measurement=UnitOfTime.SECONDS, + ), "Hz": SensorEntityDescription( key="Hz", entity_category=EntityCategory.DIAGNOSTIC, @@ -121,6 +138,48 @@ UNIT_DESCRIPTIONS = { state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=UnitOfFrequency.HERTZ, ), + "Pa": SensorEntityDescription( + key="Pa", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=SensorDeviceClass.PRESSURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPressure.PA, + ), + "kPa": SensorEntityDescription( + key="kPa", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=SensorDeviceClass.PRESSURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPressure.KPA, + ), + "bar": SensorEntityDescription( + key="bar", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=SensorDeviceClass.PRESSURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPressure.BAR, + ), + "l/m": SensorEntityDescription( + key="l/m", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=SensorDeviceClass.VOLUME_FLOW_RATE, + state_class=SensorStateClass.MEASUREMENT, + 
native_unit_of_measurement=UnitOfVolumeFlowRate.LITERS_PER_MINUTE, + ), + "m³/h": SensorEntityDescription( + key="m³/h", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=SensorDeviceClass.VOLUME_FLOW_RATE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR, + ), + "%RH": SensorEntityDescription( + key="%RH", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=SensorDeviceClass.HUMIDITY, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + ), } diff --git a/homeassistant/components/nibe_heatpump/strings.json b/homeassistant/components/nibe_heatpump/strings.json index 6fa421e0855..c65a76d3364 100644 --- a/homeassistant/components/nibe_heatpump/strings.json +++ b/homeassistant/components/nibe_heatpump/strings.json @@ -10,13 +10,13 @@ }, "modbus": { "data": { - "model": "Model of Heat Pump", + "model": "Model of heat pump", "modbus_url": "Modbus URL", - "modbus_unit": "Modbus Unit Identifier" + "modbus_unit": "Modbus unit identifier" }, "data_description": { - "modbus_url": "Modbus URL that describes the connection to your Heat Pump or MODBUS40 unit. It should be on the form:\n - `tcp://[HOST]:[PORT]` for Modbus TCP connection\n - `serial://[LOCAL DEVICE]` for a local Modbus RTU connection\n - `rfc2217://[HOST]:[PORT]` for a remote telnet based Modbus RTU connection.", - "modbus_unit": "Unit identification for your Heat Pump. Can usually be left at 0." + "modbus_url": "Modbus URL that describes the connection to your heat pump or MODBUS40 unit. It should be in the form:\n - `tcp://[HOST]:[PORT]` for Modbus TCP connection\n - `serial://[LOCAL DEVICE]` for a local Modbus RTU connection\n - `rfc2217://[HOST]:[PORT]` for a remote Telnet-based Modbus RTU connection.", + "modbus_unit": "Unit identification for your heat pump. Can usually be left at 0." 
} }, "nibegw": { diff --git a/homeassistant/components/niko_home_control/config_flow.py b/homeassistant/components/niko_home_control/config_flow.py index f37e5e9248a..76e71bc1690 100644 --- a/homeassistant/components/niko_home_control/config_flow.py +++ b/homeassistant/components/niko_home_control/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +import logging from typing import Any from nhc.controller import NHCController @@ -12,6 +13,8 @@ from homeassistant.const import CONF_HOST from .const import DOMAIN +_LOGGER = logging.getLogger(__name__) + DATA_SCHEMA = vol.Schema( { vol.Required(CONF_HOST): str, @@ -25,7 +28,8 @@ async def test_connection(host: str) -> str | None: controller = NHCController(host, 8000) try: await controller.connect() - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") return "cannot_connect" return None diff --git a/homeassistant/components/nobo_hub/strings.json b/homeassistant/components/nobo_hub/strings.json index 28be01862e9..1059934e896 100644 --- a/homeassistant/components/nobo_hub/strings.json +++ b/homeassistant/components/nobo_hub/strings.json @@ -44,7 +44,7 @@ "entity": { "select": { "global_override": { - "name": "global override", + "name": "Global override", "state": { "away": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::state::away%]", "comfort": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::state::comfort%]", @@ -53,7 +53,7 @@ } }, "week_profile": { - "name": "week profile" + "name": "Week profile" } } } diff --git a/homeassistant/components/nordpool/strings.json b/homeassistant/components/nordpool/strings.json index cc10a1a0640..7b33f032de1 100644 --- a/homeassistant/components/nordpool/strings.json +++ b/homeassistant/components/nordpool/strings.json @@ -15,7 +15,7 @@ }, "data_description": { "currency": "Select currency to display prices in, EUR is the base currency.", - "areas": "Areas to display prices for according to Nordpool market areas." + "areas": "Areas to display prices for according to Nord Pool market areas." } }, "reconfigure": { @@ -95,11 +95,11 @@ "services": { "get_prices_for_date": { "name": "Get prices for date", - "description": "Retrieve the prices for a specific date.", + "description": "Retrieves the prices for a specific date.", "fields": { "config_entry": { - "name": "Select Nord Pool configuration entry", - "description": "Choose the configuration entry." + "name": "Config entry", + "description": "The Nord Pool configuration entry for this action." }, "date": { "name": "Date", diff --git a/homeassistant/components/number/const.py b/homeassistant/components/number/const.py index 61a4fa644b0..f44a510b1c0 100644 --- a/homeassistant/components/number/const.py +++ b/homeassistant/components/number/const.py @@ -159,7 +159,7 @@ class NumberDeviceClass(StrEnum): DURATION = "duration" """Fixed duration. 
- Unit of measurement: `d`, `h`, `min`, `s`, `ms` + Unit of measurement: `d`, `h`, `min`, `s`, `ms`, `µs` """ ENERGY = "energy" @@ -462,6 +462,7 @@ DEVICE_CLASS_UNITS: dict[NumberDeviceClass, set[type[StrEnum] | str | None]] = { UnitOfTime.MINUTES, UnitOfTime.SECONDS, UnitOfTime.MILLISECONDS, + UnitOfTime.MICROSECONDS, }, NumberDeviceClass.ENERGY: set(UnitOfEnergy), NumberDeviceClass.ENERGY_DISTANCE: set(UnitOfEnergyDistance), @@ -486,6 +487,7 @@ DEVICE_CLASS_UNITS: dict[NumberDeviceClass, set[type[StrEnum] | str | None]] = { NumberDeviceClass.PM25: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER}, NumberDeviceClass.POWER_FACTOR: {PERCENTAGE, None}, NumberDeviceClass.POWER: { + UnitOfPower.MILLIWATT, UnitOfPower.WATT, UnitOfPower.KILO_WATT, UnitOfPower.MEGA_WATT, diff --git a/homeassistant/components/number/services.yaml b/homeassistant/components/number/services.yaml index dcbb955d739..6a7083a7613 100644 --- a/homeassistant/components/number/services.yaml +++ b/homeassistant/components/number/services.yaml @@ -7,5 +7,6 @@ set_value: fields: value: example: 42 + required: true selector: text: diff --git a/homeassistant/components/nut/__init__.py b/homeassistant/components/nut/__init__.py index 169dbbbff5d..5b188868819 100644 --- a/homeassistant/components/nut/__init__.py +++ b/homeassistant/components/nut/__init__.py @@ -103,7 +103,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: NutConfigEntry) -> bool: ) status = coordinator.data - _LOGGER.debug("NUT Sensors Available: %s", status) + _LOGGER.debug("NUT Sensors Available: %s", status if status else None) entry.async_on_unload(entry.add_update_listener(_async_update_listener)) unique_id = _unique_id_from_status(status) @@ -111,14 +111,34 @@ async def async_setup_entry(hass: HomeAssistant, entry: NutConfigEntry) -> bool: unique_id = entry.entry_id if username is not None and password is not None: + # Dynamically add outlet integration commands + additional_integration_commands = set() + if (num_outlets := status.get("outlet.count")) is not None: + for outlet_num in range(1, int(num_outlets) + 1): + outlet_num_str: str = str(outlet_num) + additional_integration_commands |= { + f"outlet.{outlet_num_str}.load.cycle", + f"outlet.{outlet_num_str}.load.on", + f"outlet.{outlet_num_str}.load.off", + } + + valid_integration_commands = ( + INTEGRATION_SUPPORTED_COMMANDS | additional_integration_commands + ) + user_available_commands = { - device_supported_command - for device_supported_command in await data.async_list_commands() or {} - if device_supported_command in INTEGRATION_SUPPORTED_COMMANDS + device_command + for device_command in await data.async_list_commands() or {} + if device_command in valid_integration_commands } else: user_available_commands = set() + _LOGGER.debug( + "NUT Commands Available: %s", + user_available_commands if user_available_commands else None, + ) + entry.runtime_data = NutRuntimeData( coordinator, data, unique_id, user_available_commands ) @@ -240,6 +260,7 @@ class PyNUTData: self._client = AIONUTClient(self._host, port, username, password, 5, persistent) self.ups_list: dict[str, str] | None = None + self.device_name: str | None = None self._status: dict[str, str] | None = None self._device_info: NUTDeviceInfo | None = None @@ -250,7 +271,7 @@ class PyNUTData: @property def name(self) -> str: - """Return the name of the ups.""" + """Return the name of the NUT device.""" return self._alias or f"Nut-{self._host}" @property @@ -294,6 +315,8 @@ class PyNUTData: self._status = await self._async_get_status() if 
self._device_info is None: self._device_info = self._get_device_info() + if self.device_name is None: + self.device_name = self.name.title() return self._status async def async_run_command(self, command_name: str) -> None: diff --git a/homeassistant/components/nut/button.py b/homeassistant/components/nut/button.py new file mode 100644 index 00000000000..0708056b2e3 --- /dev/null +++ b/homeassistant/components/nut/button.py @@ -0,0 +1,67 @@ +"""Provides buttons for NUT device outlets.""" + +from __future__ import annotations + +import logging + +from homeassistant.components.button import ( + ButtonDeviceClass, + ButtonEntity, + ButtonEntityDescription, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . import NutConfigEntry +from .entity import NUTBaseEntity + +_LOGGER = logging.getLogger(__name__) + +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: NutConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up the NUT buttons.""" + pynut_data = config_entry.runtime_data + coordinator = pynut_data.coordinator + status = coordinator.data + + # Dynamically add outlet button types + if (num_outlets := status.get("outlet.count")) is None: + return + + data = pynut_data.data + unique_id = pynut_data.unique_id + valid_button_types: dict[str, ButtonEntityDescription] = {} + for outlet_num in range(1, int(num_outlets) + 1): + outlet_num_str = str(outlet_num) + outlet_name: str = status.get(f"outlet.{outlet_num_str}.name") or outlet_num_str + valid_button_types |= { + f"outlet.{outlet_num_str}.load.cycle": ButtonEntityDescription( + key=f"outlet.{outlet_num_str}.load.cycle", + translation_key="outlet_number_load_cycle", + translation_placeholders={"outlet_name": outlet_name}, + device_class=ButtonDeviceClass.RESTART, + entity_registry_enabled_default=True, + ), + } + + async_add_entities( + NUTButton(coordinator, description, data, unique_id) + for button_id, description in valid_button_types.items() + if button_id in pynut_data.user_available_commands + ) + + +class NUTButton(NUTBaseEntity, ButtonEntity): + """Representation of a button entity for NUT.""" + + async def async_press(self) -> None: + """Press the button.""" + name_list = self.entity_description.key.split(".") + command_name = f"{name_list[0]}.{name_list[1]}.load.cycle" + await self.pynut_data.async_run_command(command_name) diff --git a/homeassistant/components/nut/const.py b/homeassistant/components/nut/const.py index e67299aa9a3..d741d8e95f9 100644 --- a/homeassistant/components/nut/const.py +++ b/homeassistant/components/nut/const.py @@ -6,7 +6,11 @@ from homeassistant.const import Platform DOMAIN = "nut" -PLATFORMS = [Platform.SENSOR] +PLATFORMS = [ + Platform.BUTTON, + Platform.SENSOR, + Platform.SWITCH, +] DEFAULT_NAME = "NUT UPS" DEFAULT_HOST = "localhost" @@ -63,10 +67,6 @@ COMMAND_TEST_FAILURE_STOP = "test.failure.stop" COMMAND_TEST_PANEL_START = "test.panel.start" COMMAND_TEST_PANEL_STOP = "test.panel.stop" COMMAND_TEST_SYSTEM_START = "test.system.start" -COMMAND_OUTLET_1_LOAD_OFF = "outlet.1.load.off" -COMMAND_OUTLET_1_LOAD_ON = "outlet.1.load.on" -COMMAND_OUTLET_2_LOAD_OFF = "outlet.2.load.off" -COMMAND_OUTLET_2_LOAD_ON = "outlet.2.load.on" INTEGRATION_SUPPORTED_COMMANDS = { COMMAND_BEEPER_DISABLE, @@ -95,8 +95,4 @@ INTEGRATION_SUPPORTED_COMMANDS = { COMMAND_TEST_PANEL_START, COMMAND_TEST_PANEL_STOP, COMMAND_TEST_SYSTEM_START, - 
COMMAND_OUTLET_1_LOAD_OFF, - COMMAND_OUTLET_1_LOAD_ON, - COMMAND_OUTLET_2_LOAD_OFF, - COMMAND_OUTLET_2_LOAD_ON, } diff --git a/homeassistant/components/nut/entity.py b/homeassistant/components/nut/entity.py new file mode 100644 index 00000000000..e6536d8aad6 --- /dev/null +++ b/homeassistant/components/nut/entity.py @@ -0,0 +1,67 @@ +"""Base entity for the NUT integration.""" + +from __future__ import annotations + +from dataclasses import asdict +from typing import cast + +from homeassistant.const import ( + ATTR_MANUFACTURER, + ATTR_MODEL, + ATTR_SERIAL_NUMBER, + ATTR_SW_VERSION, +) +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import ( + CoordinatorEntity, + DataUpdateCoordinator, +) + +from . import PyNUTData +from .const import DOMAIN + +NUT_DEV_INFO_TO_DEV_INFO: dict[str, str] = { + "manufacturer": ATTR_MANUFACTURER, + "model": ATTR_MODEL, + "firmware": ATTR_SW_VERSION, + "serial": ATTR_SERIAL_NUMBER, +} + + +class NUTBaseEntity(CoordinatorEntity[DataUpdateCoordinator]): + """NUT base entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: DataUpdateCoordinator, + entity_description: EntityDescription, + data: PyNUTData, + unique_id: str, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + + self.entity_description = entity_description + self._attr_unique_id = f"{unique_id}_{entity_description.key}" + + self.pynut_data = data + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, unique_id)}, + name=self.pynut_data.device_name, + ) + self._attr_device_info.update(_get_nut_device_info(data)) + + +def _get_nut_device_info(data: PyNUTData) -> DeviceInfo: + """Return a DeviceInfo object filled with NUT device info.""" + nut_dev_infos = asdict(data.device_info) + nut_infos = { + info_key: nut_dev_infos[nut_key] + for nut_key, info_key in NUT_DEV_INFO_TO_DEV_INFO.items() + if nut_dev_infos[nut_key] is not None + } + + return cast(DeviceInfo, nut_infos) diff --git a/homeassistant/components/nut/icons.json b/homeassistant/components/nut/icons.json index 261d28d712f..a795368005c 100644 --- a/homeassistant/components/nut/icons.json +++ b/homeassistant/components/nut/icons.json @@ -1,5 +1,10 @@ { "entity": { + "button": { + "outlet_number_load_cycle": { + "default": "mdi:restart" + } + }, "sensor": { "ambient_humidity_status": { "default": "mdi:information-outline" @@ -37,11 +42,26 @@ "battery_packs_bad": { "default": "mdi:information-outline" }, + "battery_runtime": { + "default": "mdi:clock-outline" + }, + "battery_runtime_low": { + "default": "mdi:clock-alert-outline" + }, + "battery_runtime_restart": { + "default": "mdi:clock-start" + }, "battery_type": { "default": "mdi:information-outline" }, + "battery_voltage_high": { + "default": "mdi:battery-high" + }, + "battery_voltage_low": { + "default": "mdi:battery-low" + }, "input_bypass_phases": { - "default": "mdi:information-outline" + "default": "mdi:sine-wave" }, "input_current_status": { "default": "mdi:information-outline" @@ -50,13 +70,10 @@ "default": "mdi:information-outline" }, "input_load": { - "default": "mdi:gauge" + "default": "mdi:percent-box-outline" }, "input_phases": { - "default": "mdi:information-outline" - }, - "input_power": { - "default": "mdi:gauge" + "default": "mdi:sine-wave" }, "input_sensitivity": { "default": "mdi:information-outline" @@ -67,20 +84,23 @@ "input_voltage_status": { "default": "mdi:information-outline" }, - 
"outlet_voltage": { - "default": "mdi:gauge" + "outlet_number_current_status": { + "default": "mdi:information-outline" + }, + "outlet_number_desc": { + "default": "mdi:information-outline" }, "output_l1_power_percent": { - "default": "mdi:gauge" + "default": "mdi:percent-circle-outline" }, "output_l2_power_percent": { - "default": "mdi:gauge" + "default": "mdi:percent-circle-outline" }, "output_l3_power_percent": { - "default": "mdi:gauge" + "default": "mdi:percent-circle-outline" }, "output_phases": { - "default": "mdi:information-outline" + "default": "mdi:sine-wave" }, "ups_alarm": { "default": "mdi:alarm" @@ -91,20 +111,29 @@ "ups_contacts": { "default": "mdi:information-outline" }, + "ups_delay_reboot": { + "default": "mdi:timelapse" + }, + "ups_delay_shutdown": { + "default": "mdi:timelapse" + }, + "ups_delay_start": { + "default": "mdi:timelapse" + }, "ups_display_language": { "default": "mdi:information-outline" }, "ups_efficiency": { - "default": "mdi:gauge" + "default": "mdi:percent-outline" }, "ups_id": { "default": "mdi:information-outline" }, "ups_load": { - "default": "mdi:gauge" + "default": "mdi:percent-box-outline" }, "ups_load_high": { - "default": "mdi:gauge" + "default": "mdi:percent-box-outline" }, "ups_shutdown": { "default": "mdi:information-outline" @@ -127,15 +156,32 @@ "ups_test_date": { "default": "mdi:calendar" }, + "ups_test_interval": { + "default": "mdi:timelapse" + }, "ups_test_result": { "default": "mdi:information-outline" }, + "ups_timer_reboot": { + "default": "mdi:timer-refresh-outline" + }, + "ups_timer_shutdown": { + "default": "mdi:timer-stop-outline" + }, + "ups_timer_start": { + "default": "mdi:timer-play-outline" + }, "ups_type": { "default": "mdi:information-outline" }, "ups_watchdog_status": { "default": "mdi:information-outline" } + }, + "switch": { + "outlet_number_load_poweronoff": { + "default": "mdi:power" + } } } } diff --git a/homeassistant/components/nut/sensor.py b/homeassistant/components/nut/sensor.py index 1484f11dac7..5bf7958e39e 100644 --- a/homeassistant/components/nut/sensor.py +++ b/homeassistant/components/nut/sensor.py @@ -1,10 +1,9 @@ -"""Provides a sensor to track various status aspects of a UPS.""" +"""Provides a sensor to track various status aspects of a NUT device.""" from __future__ import annotations -from dataclasses import asdict import logging -from typing import Final, cast +from typing import Final from homeassistant.components.sensor import ( SensorDeviceClass, @@ -13,10 +12,6 @@ from homeassistant.components.sensor import ( SensorStateClass, ) from homeassistant.const import ( - ATTR_MANUFACTURER, - ATTR_MODEL, - ATTR_SERIAL_NUMBER, - ATTR_SW_VERSION, PERCENTAGE, STATE_UNKNOWN, EntityCategory, @@ -29,22 +24,14 @@ from homeassistant.const import ( UnitOfTime, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, -) -from . import NutConfigEntry, PyNUTData -from .const import DOMAIN, KEY_STATUS, KEY_STATUS_DISPLAY, STATE_TYPES +from . 
import NutConfigEntry +from .const import KEY_STATUS, KEY_STATUS_DISPLAY, STATE_TYPES +from .entity import NUTBaseEntity -NUT_DEV_INFO_TO_DEV_INFO: dict[str, str] = { - "manufacturer": ATTR_MANUFACTURER, - "model": ATTR_MODEL, - "firmware": ATTR_SW_VERSION, - "serial": ATTR_SERIAL_NUMBER, -} +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 AMBIENT_PRESENT = "ambient.present" AMBIENT_SENSORS = { @@ -65,51 +52,752 @@ _LOGGER = logging.getLogger(__name__) SENSOR_TYPES: Final[dict[str, SensorEntityDescription]] = { - "ups.status.display": SensorEntityDescription( - key="ups.status.display", - translation_key="ups_status_display", + "ambient.humidity": SensorEntityDescription( + key="ambient.humidity", + translation_key="ambient_humidity", + native_unit_of_measurement=PERCENTAGE, + device_class=SensorDeviceClass.HUMIDITY, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, ), - "ups.status": SensorEntityDescription( - key="ups.status", - translation_key="ups_status", + "ambient.humidity.status": SensorEntityDescription( + key="ambient.humidity.status", + translation_key="ambient_humidity_status", + device_class=SensorDeviceClass.ENUM, + options=AMBIENT_THRESHOLD_STATUS_OPTIONS, + entity_category=EntityCategory.DIAGNOSTIC, ), - "ups.alarm": SensorEntityDescription( - key="ups.alarm", - translation_key="ups_alarm", + "ambient.temperature": SensorEntityDescription( + key="ambient.temperature", + translation_key="ambient_temperature", + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, ), - "ups.temperature": SensorEntityDescription( - key="ups.temperature", - translation_key="ups_temperature", + "ambient.temperature.status": SensorEntityDescription( + key="ambient.temperature.status", + translation_key="ambient_temperature_status", + device_class=SensorDeviceClass.ENUM, + options=AMBIENT_THRESHOLD_STATUS_OPTIONS, + entity_category=EntityCategory.DIAGNOSTIC, + ), + "battery.alarm.threshold": SensorEntityDescription( + key="battery.alarm.threshold", + translation_key="battery_alarm_threshold", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.capacity": SensorEntityDescription( + key="battery.capacity", + translation_key="battery_capacity", + native_unit_of_measurement="Ah", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.charge": SensorEntityDescription( + key="battery.charge", + translation_key="battery_charge", + native_unit_of_measurement=PERCENTAGE, + device_class=SensorDeviceClass.BATTERY, + state_class=SensorStateClass.MEASUREMENT, + ), + "battery.charge.low": SensorEntityDescription( + key="battery.charge.low", + translation_key="battery_charge_low", + native_unit_of_measurement=PERCENTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.charge.restart": SensorEntityDescription( + key="battery.charge.restart", + translation_key="battery_charge_restart", + native_unit_of_measurement=PERCENTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.charge.warning": SensorEntityDescription( + key="battery.charge.warning", + translation_key="battery_charge_warning", + native_unit_of_measurement=PERCENTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + 
), + "battery.charger.status": SensorEntityDescription( + key="battery.charger.status", + translation_key="battery_charger_status", + ), + "battery.current": SensorEntityDescription( + key="battery.current", + translation_key="battery_current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.current.total": SensorEntityDescription( + key="battery.current.total", + translation_key="battery_current_total", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=SensorDeviceClass.CURRENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.date": SensorEntityDescription( + key="battery.date", + translation_key="battery_date", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.mfr.date": SensorEntityDescription( + key="battery.mfr.date", + translation_key="battery_mfr_date", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.packs": SensorEntityDescription( + key="battery.packs", + translation_key="battery_packs", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.packs.bad": SensorEntityDescription( + key="battery.packs.bad", + translation_key="battery_packs_bad", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.runtime": SensorEntityDescription( + key="battery.runtime", + translation_key="battery_runtime", + native_unit_of_measurement=UnitOfTime.SECONDS, + device_class=SensorDeviceClass.DURATION, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.runtime.low": SensorEntityDescription( + key="battery.runtime.low", + translation_key="battery_runtime_low", + native_unit_of_measurement=UnitOfTime.SECONDS, + device_class=SensorDeviceClass.DURATION, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.runtime.restart": SensorEntityDescription( + key="battery.runtime.restart", + translation_key="battery_runtime_restart", + native_unit_of_measurement=UnitOfTime.SECONDS, + device_class=SensorDeviceClass.DURATION, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.temperature": SensorEntityDescription( + key="battery.temperature", + translation_key="battery_temperature", native_unit_of_measurement=UnitOfTemperature.CELSIUS, device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - "ups.load": SensorEntityDescription( - key="ups.load", - translation_key="ups_load", + "battery.type": SensorEntityDescription( + key="battery.type", + translation_key="battery_type", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.voltage": SensorEntityDescription( + key="battery.voltage", + translation_key="battery_voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.voltage.high": SensorEntityDescription( + key="battery.voltage.high", + 
translation_key="battery_voltage_high", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.voltage.low": SensorEntityDescription( + key="battery.voltage.low", + translation_key="battery_voltage_low", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.voltage.nominal": SensorEntityDescription( + key="battery.voltage.nominal", + translation_key="battery_voltage_nominal", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.bypass.current": SensorEntityDescription( + key="input.bypass.current", + translation_key="input_bypass_current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.bypass.frequency": SensorEntityDescription( + key="input.bypass.frequency", + translation_key="input_bypass_frequency", + native_unit_of_measurement=UnitOfFrequency.HERTZ, + device_class=SensorDeviceClass.FREQUENCY, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.bypass.L1.current": SensorEntityDescription( + key="input.bypass.L1.current", + translation_key="input_bypass_l1_current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.bypass.L1-N.voltage": SensorEntityDescription( + key="input.bypass.L1-N.voltage", + translation_key="input_bypass_l1_n_voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.bypass.L1.realpower": SensorEntityDescription( + key="input.bypass.L1.realpower", + translation_key="input_bypass_l1_realpower", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.bypass.L2.current": SensorEntityDescription( + key="input.bypass.L2.current", + translation_key="input_bypass_l2_current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.bypass.L2-N.voltage": SensorEntityDescription( + key="input.bypass.L2-N.voltage", + translation_key="input_bypass_l2_n_voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.bypass.L2.realpower": SensorEntityDescription( + key="input.bypass.L2.realpower", + 
translation_key="input_bypass_l2_realpower", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.bypass.L3.current": SensorEntityDescription( + key="input.bypass.L3.current", + translation_key="input_bypass_l3_current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.bypass.L3-N.voltage": SensorEntityDescription( + key="input.bypass.L3-N.voltage", + translation_key="input_bypass_l3_n_voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.bypass.L3.realpower": SensorEntityDescription( + key="input.bypass.L3.realpower", + translation_key="input_bypass_l3_realpower", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.bypass.phases": SensorEntityDescription( + key="input.bypass.phases", + translation_key="input_bypass_phases", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.bypass.realpower": SensorEntityDescription( + key="input.bypass.realpower", + translation_key="input_bypass_realpower", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.bypass.voltage": SensorEntityDescription( + key="input.bypass.voltage", + translation_key="input_bypass_voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.current": SensorEntityDescription( + key="input.current", + translation_key="input_current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + ), + "input.current.status": SensorEntityDescription( + key="input.current.status", + translation_key="input_current_status", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.frequency": SensorEntityDescription( + key="input.frequency", + translation_key="input_frequency", + native_unit_of_measurement=UnitOfFrequency.HERTZ, + device_class=SensorDeviceClass.FREQUENCY, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.frequency.nominal": SensorEntityDescription( + key="input.frequency.nominal", + translation_key="input_frequency_nominal", + native_unit_of_measurement=UnitOfFrequency.HERTZ, + device_class=SensorDeviceClass.FREQUENCY, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.frequency.status": SensorEntityDescription( + key="input.frequency.status", + 
translation_key="input_frequency_status", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.L1.current": SensorEntityDescription( + key="input.L1.current", + translation_key="input_l1_current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.L1.frequency": SensorEntityDescription( + key="input.L1.frequency", + translation_key="input_l1_frequency", + native_unit_of_measurement=UnitOfFrequency.HERTZ, + device_class=SensorDeviceClass.FREQUENCY, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.L1-N.voltage": SensorEntityDescription( + key="input.L1-N.voltage", + translation_key="input_l1_n_voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.L1.realpower": SensorEntityDescription( + key="input.L1.realpower", + translation_key="input_l1_realpower", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.L2.current": SensorEntityDescription( + key="input.L2.current", + translation_key="input_l2_current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.L2.frequency": SensorEntityDescription( + key="input.L2.frequency", + translation_key="input_l2_frequency", + native_unit_of_measurement=UnitOfFrequency.HERTZ, + device_class=SensorDeviceClass.FREQUENCY, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.L2-N.voltage": SensorEntityDescription( + key="input.L2-N.voltage", + translation_key="input_l2_n_voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.L2.realpower": SensorEntityDescription( + key="input.L2.realpower", + translation_key="input_l2_realpower", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.L3.current": SensorEntityDescription( + key="input.L3.current", + translation_key="input_l3_current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.L3.frequency": SensorEntityDescription( + key="input.L3.frequency", + translation_key="input_l3_frequency", + native_unit_of_measurement=UnitOfFrequency.HERTZ, + device_class=SensorDeviceClass.FREQUENCY, + state_class=SensorStateClass.MEASUREMENT, + 
entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.L3-N.voltage": SensorEntityDescription( + key="input.L3-N.voltage", + translation_key="input_l3_n_voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.L3.realpower": SensorEntityDescription( + key="input.L3.realpower", + translation_key="input_l3_realpower", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.load": SensorEntityDescription( + key="input.load", + translation_key="input_load", native_unit_of_measurement=PERCENTAGE, state_class=SensorStateClass.MEASUREMENT, ), - "ups.load.high": SensorEntityDescription( - key="ups.load.high", - translation_key="ups_load_high", + "input.phases": SensorEntityDescription( + key="input.phases", + translation_key="input_phases", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.power": SensorEntityDescription( + key="input.power", + translation_key="input_power", + native_unit_of_measurement=UnitOfApparentPower.VOLT_AMPERE, + device_class=SensorDeviceClass.APPARENT_POWER, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.realpower": SensorEntityDescription( + key="input.realpower", + translation_key="input_realpower", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.sensitivity": SensorEntityDescription( + key="input.sensitivity", + translation_key="input_sensitivity", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.transfer.high": SensorEntityDescription( + key="input.transfer.high", + translation_key="input_transfer_high", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.transfer.low": SensorEntityDescription( + key="input.transfer.low", + translation_key="input_transfer_low", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.transfer.reason": SensorEntityDescription( + key="input.transfer.reason", + translation_key="input_transfer_reason", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.voltage": SensorEntityDescription( + key="input.voltage", + translation_key="input_voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + ), + "input.voltage.nominal": SensorEntityDescription( + key="input.voltage.nominal", + translation_key="input_voltage_nominal", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + 
"input.voltage.status": SensorEntityDescription( + key="input.voltage.status", + translation_key="input_voltage_status", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "outlet.voltage": SensorEntityDescription( + key="outlet.voltage", + translation_key="outlet_voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + ), + "output.current": SensorEntityDescription( + key="output.current", + translation_key="output_current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.current.nominal": SensorEntityDescription( + key="output.current.nominal", + translation_key="output_current_nominal", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=SensorDeviceClass.CURRENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.frequency": SensorEntityDescription( + key="output.frequency", + translation_key="output_frequency", + native_unit_of_measurement=UnitOfFrequency.HERTZ, + device_class=SensorDeviceClass.FREQUENCY, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.frequency.nominal": SensorEntityDescription( + key="output.frequency.nominal", + translation_key="output_frequency_nominal", + native_unit_of_measurement=UnitOfFrequency.HERTZ, + device_class=SensorDeviceClass.FREQUENCY, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.L1.current": SensorEntityDescription( + key="output.L1.current", + translation_key="output_l1_current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.L1-N.voltage": SensorEntityDescription( + key="output.L1-N.voltage", + translation_key="output_l1_n_voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.L1.power.percent": SensorEntityDescription( + key="output.L1.power.percent", + translation_key="output_l1_power_percent", native_unit_of_measurement=PERCENTAGE, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - "ups.id": SensorEntityDescription( - key="ups.id", - translation_key="ups_id", + "output.L1.realpower": SensorEntityDescription( + key="output.L1.realpower", + translation_key="output_l1_realpower", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - "ups.delay.start": SensorEntityDescription( - key="ups.delay.start", - translation_key="ups_delay_start", - native_unit_of_measurement=UnitOfTime.SECONDS, - device_class=SensorDeviceClass.DURATION, + "output.L2.current": SensorEntityDescription( + key="output.L2.current", + translation_key="output_l2_current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + 
device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.L2-N.voltage": SensorEntityDescription( + key="output.L2-N.voltage", + translation_key="output_l2_n_voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.L2.power.percent": SensorEntityDescription( + key="output.L2.power.percent", + translation_key="output_l2_power_percent", + native_unit_of_measurement=PERCENTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.L2.realpower": SensorEntityDescription( + key="output.L2.realpower", + translation_key="output_l2_realpower", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.L3.current": SensorEntityDescription( + key="output.L3.current", + translation_key="output_l3_current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.L3-N.voltage": SensorEntityDescription( + key="output.L3-N.voltage", + translation_key="output_l3_n_voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.L3.power.percent": SensorEntityDescription( + key="output.L3.power.percent", + translation_key="output_l3_power_percent", + native_unit_of_measurement=PERCENTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.L3.realpower": SensorEntityDescription( + key="output.L3.realpower", + translation_key="output_l3_realpower", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.phases": SensorEntityDescription( + key="output.phases", + translation_key="output_phases", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.power": SensorEntityDescription( + key="output.power", + translation_key="output_power", + native_unit_of_measurement=UnitOfApparentPower.VOLT_AMPERE, + device_class=SensorDeviceClass.APPARENT_POWER, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.power.nominal": SensorEntityDescription( + key="output.power.nominal", + translation_key="output_power_nominal", + native_unit_of_measurement=UnitOfApparentPower.VOLT_AMPERE, + device_class=SensorDeviceClass.APPARENT_POWER, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.realpower": SensorEntityDescription( + key="output.realpower", + translation_key="output_realpower", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, 
+ entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.realpower.nominal": SensorEntityDescription( + key="output.realpower.nominal", + translation_key="output_realpower_nominal", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.voltage": SensorEntityDescription( + key="output.voltage", + translation_key="output_voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + ), + "output.voltage.nominal": SensorEntityDescription( + key="output.voltage.nominal", + translation_key="output_voltage_nominal", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "ups.alarm": SensorEntityDescription( + key="ups.alarm", + translation_key="ups_alarm", + ), + "ups.beeper.status": SensorEntityDescription( + key="ups.beeper.status", + translation_key="ups_beeper_status", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "ups.contacts": SensorEntityDescription( + key="ups.contacts", + translation_key="ups_contacts", entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), @@ -129,62 +817,20 @@ SENSOR_TYPES: Final[dict[str, SensorEntityDescription]] = { entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - "ups.timer.start": SensorEntityDescription( - key="ups.timer.start", - translation_key="ups_timer_start", + "ups.delay.start": SensorEntityDescription( + key="ups.delay.start", + translation_key="ups_delay_start", native_unit_of_measurement=UnitOfTime.SECONDS, device_class=SensorDeviceClass.DURATION, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - "ups.timer.reboot": SensorEntityDescription( - key="ups.timer.reboot", - translation_key="ups_timer_reboot", - native_unit_of_measurement=UnitOfTime.SECONDS, - device_class=SensorDeviceClass.DURATION, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "ups.timer.shutdown": SensorEntityDescription( - key="ups.timer.shutdown", - translation_key="ups_timer_shutdown", - native_unit_of_measurement=UnitOfTime.SECONDS, - device_class=SensorDeviceClass.DURATION, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "ups.test.interval": SensorEntityDescription( - key="ups.test.interval", - translation_key="ups_test_interval", - native_unit_of_measurement=UnitOfTime.SECONDS, - device_class=SensorDeviceClass.DURATION, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "ups.test.result": SensorEntityDescription( - key="ups.test.result", - translation_key="ups_test_result", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "ups.test.date": SensorEntityDescription( - key="ups.test.date", - translation_key="ups_test_date", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), "ups.display.language": SensorEntityDescription( key="ups.display.language", translation_key="ups_display_language", entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - "ups.contacts": SensorEntityDescription( - 
key="ups.contacts", - translation_key="ups_contacts", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), "ups.efficiency": SensorEntityDescription( key="ups.efficiency", translation_key="ups_efficiency", @@ -193,6 +839,25 @@ SENSOR_TYPES: Final[dict[str, SensorEntityDescription]] = { entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), + "ups.id": SensorEntityDescription( + key="ups.id", + translation_key="ups_id", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "ups.load": SensorEntityDescription( + key="ups.load", + translation_key="ups_load", + native_unit_of_measurement=PERCENTAGE, + state_class=SensorStateClass.MEASUREMENT, + ), + "ups.load.high": SensorEntityDescription( + key="ups.load.high", + translation_key="ups_load_high", + native_unit_of_measurement=PERCENTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), "ups.power": SensorEntityDescription( key="ups.power", translation_key="ups_power", @@ -227,21 +892,9 @@ SENSOR_TYPES: Final[dict[str, SensorEntityDescription]] = { entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - "ups.beeper.status": SensorEntityDescription( - key="ups.beeper.status", - translation_key="ups_beeper_status", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "ups.type": SensorEntityDescription( - key="ups.type", - translation_key="ups_type", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "ups.watchdog.status": SensorEntityDescription( - key="ups.watchdog.status", - translation_key="ups_watchdog_status", + "ups.shutdown": SensorEntityDescription( + key="ups.shutdown", + translation_key="ups_shutdown", entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), @@ -263,772 +916,89 @@ SENSOR_TYPES: Final[dict[str, SensorEntityDescription]] = { entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - "ups.shutdown": SensorEntityDescription( - key="ups.shutdown", - translation_key="ups_shutdown", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, + "ups.status": SensorEntityDescription( + key="ups.status", + translation_key="ups_status", ), - "battery.charge": SensorEntityDescription( - key="battery.charge", - translation_key="battery_charge", - native_unit_of_measurement=PERCENTAGE, - device_class=SensorDeviceClass.BATTERY, - state_class=SensorStateClass.MEASUREMENT, + "ups.status.display": SensorEntityDescription( + key="ups.status.display", + translation_key="ups_status_display", ), - "battery.charge.low": SensorEntityDescription( - key="battery.charge.low", - translation_key="battery_charge_low", - native_unit_of_measurement=PERCENTAGE, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "battery.charge.restart": SensorEntityDescription( - key="battery.charge.restart", - translation_key="battery_charge_restart", - native_unit_of_measurement=PERCENTAGE, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "battery.charge.warning": SensorEntityDescription( - key="battery.charge.warning", - translation_key="battery_charge_warning", - native_unit_of_measurement=PERCENTAGE, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "battery.charger.status": SensorEntityDescription( 
- key="battery.charger.status", - translation_key="battery_charger_status", - ), - "battery.voltage": SensorEntityDescription( - key="battery.voltage", - translation_key="battery_voltage", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "battery.voltage.nominal": SensorEntityDescription( - key="battery.voltage.nominal", - translation_key="battery_voltage_nominal", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "battery.voltage.low": SensorEntityDescription( - key="battery.voltage.low", - translation_key="battery_voltage_low", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "battery.voltage.high": SensorEntityDescription( - key="battery.voltage.high", - translation_key="battery_voltage_high", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "battery.capacity": SensorEntityDescription( - key="battery.capacity", - translation_key="battery_capacity", - native_unit_of_measurement="Ah", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "battery.current": SensorEntityDescription( - key="battery.current", - translation_key="battery_current", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "battery.current.total": SensorEntityDescription( - key="battery.current.total", - translation_key="battery_current_total", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "battery.temperature": SensorEntityDescription( - key="battery.temperature", - translation_key="battery_temperature", + "ups.temperature": SensorEntityDescription( + key="ups.temperature", + translation_key="ups_temperature", native_unit_of_measurement=UnitOfTemperature.CELSIUS, device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - "battery.runtime": SensorEntityDescription( - key="battery.runtime", - translation_key="battery_runtime", + "ups.test.date": SensorEntityDescription( + key="ups.test.date", + translation_key="ups_test_date", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "ups.test.interval": SensorEntityDescription( + key="ups.test.interval", + translation_key="ups_test_interval", native_unit_of_measurement=UnitOfTime.SECONDS, device_class=SensorDeviceClass.DURATION, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - "battery.runtime.low": SensorEntityDescription( - key="battery.runtime.low", - translation_key="battery_runtime_low", + "ups.test.result": SensorEntityDescription( + key="ups.test.result", + translation_key="ups_test_result", + 
entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "ups.timer.reboot": SensorEntityDescription( + key="ups.timer.reboot", + translation_key="ups_timer_reboot", native_unit_of_measurement=UnitOfTime.SECONDS, device_class=SensorDeviceClass.DURATION, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - "battery.runtime.restart": SensorEntityDescription( - key="battery.runtime.restart", - translation_key="battery_runtime_restart", + "ups.timer.shutdown": SensorEntityDescription( + key="ups.timer.shutdown", + translation_key="ups_timer_shutdown", native_unit_of_measurement=UnitOfTime.SECONDS, device_class=SensorDeviceClass.DURATION, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - "battery.alarm.threshold": SensorEntityDescription( - key="battery.alarm.threshold", - translation_key="battery_alarm_threshold", + "ups.timer.start": SensorEntityDescription( + key="ups.timer.start", + translation_key="ups_timer_start", + native_unit_of_measurement=UnitOfTime.SECONDS, + device_class=SensorDeviceClass.DURATION, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - "battery.date": SensorEntityDescription( - key="battery.date", - translation_key="battery_date", + "ups.type": SensorEntityDescription( + key="ups.type", + translation_key="ups_type", entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - "battery.mfr.date": SensorEntityDescription( - key="battery.mfr.date", - translation_key="battery_mfr_date", + "ups.watchdog.status": SensorEntityDescription( + key="ups.watchdog.status", + translation_key="ups_watchdog_status", entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - "battery.packs": SensorEntityDescription( - key="battery.packs", - translation_key="battery_packs", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "battery.packs.bad": SensorEntityDescription( - key="battery.packs.bad", - translation_key="battery_packs_bad", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "battery.type": SensorEntityDescription( - key="battery.type", - translation_key="battery_type", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.sensitivity": SensorEntityDescription( - key="input.sensitivity", - translation_key="input_sensitivity", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.transfer.low": SensorEntityDescription( - key="input.transfer.low", - translation_key="input_transfer_low", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.transfer.high": SensorEntityDescription( - key="input.transfer.high", - translation_key="input_transfer_high", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.transfer.reason": SensorEntityDescription( - key="input.transfer.reason", - translation_key="input_transfer_reason", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.voltage": SensorEntityDescription( - key="input.voltage", - translation_key="input_voltage", - 
native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - state_class=SensorStateClass.MEASUREMENT, - ), - "input.voltage.nominal": SensorEntityDescription( - key="input.voltage.nominal", - translation_key="input_voltage_nominal", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.voltage.status": SensorEntityDescription( - key="input.voltage.status", - translation_key="input_voltage_status", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.L1-N.voltage": SensorEntityDescription( - key="input.L1-N.voltage", - translation_key="input_l1_n_voltage", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.L2-N.voltage": SensorEntityDescription( - key="input.L2-N.voltage", - translation_key="input_l2_n_voltage", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.L3-N.voltage": SensorEntityDescription( - key="input.L3-N.voltage", - translation_key="input_l3_n_voltage", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.frequency": SensorEntityDescription( - key="input.frequency", - translation_key="input_frequency", - native_unit_of_measurement=UnitOfFrequency.HERTZ, - device_class=SensorDeviceClass.FREQUENCY, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.frequency.nominal": SensorEntityDescription( - key="input.frequency.nominal", - translation_key="input_frequency_nominal", - native_unit_of_measurement=UnitOfFrequency.HERTZ, - device_class=SensorDeviceClass.FREQUENCY, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.frequency.status": SensorEntityDescription( - key="input.frequency.status", - translation_key="input_frequency_status", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.L1.frequency": SensorEntityDescription( - key="input.L1.frequency", - translation_key="input_l1_frequency", - native_unit_of_measurement=UnitOfFrequency.HERTZ, - device_class=SensorDeviceClass.FREQUENCY, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.L2.frequency": SensorEntityDescription( - key="input.L2.frequency", - translation_key="input_l2_frequency", - native_unit_of_measurement=UnitOfFrequency.HERTZ, - device_class=SensorDeviceClass.FREQUENCY, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.L3.frequency": SensorEntityDescription( - key="input.L3.frequency", - translation_key="input_l3_frequency", - native_unit_of_measurement=UnitOfFrequency.HERTZ, - device_class=SensorDeviceClass.FREQUENCY, - 
state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.bypass.current": SensorEntityDescription( - key="input.bypass.current", - translation_key="input_bypass_current", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.bypass.L1.current": SensorEntityDescription( - key="input.bypass.L1.current", - translation_key="input_bypass_l1_current", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.bypass.L2.current": SensorEntityDescription( - key="input.bypass.L2.current", - translation_key="input_bypass_l2_current", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.bypass.L3.current": SensorEntityDescription( - key="input.bypass.L3.current", - translation_key="input_bypass_l3_current", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.bypass.frequency": SensorEntityDescription( - key="input.bypass.frequency", - translation_key="input_bypass_frequency", - native_unit_of_measurement=UnitOfFrequency.HERTZ, - device_class=SensorDeviceClass.FREQUENCY, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.bypass.phases": SensorEntityDescription( - key="input.bypass.phases", - translation_key="input_bypass_phases", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.bypass.realpower": SensorEntityDescription( - key="input.bypass.realpower", - translation_key="input_bypass_realpower", - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.bypass.L1.realpower": SensorEntityDescription( - key="input.bypass.L1.realpower", - translation_key="input_bypass_l1_realpower", - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.bypass.L2.realpower": SensorEntityDescription( - key="input.bypass.L2.realpower", - translation_key="input_bypass_l2_realpower", - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.bypass.L3.realpower": SensorEntityDescription( - key="input.bypass.L3.realpower", - translation_key="input_bypass_l3_realpower", - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - 
entity_registry_enabled_default=False, - ), - "input.bypass.voltage": SensorEntityDescription( - key="input.bypass.voltage", - translation_key="input_bypass_voltage", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.bypass.L1-N.voltage": SensorEntityDescription( - key="input.bypass.L1-N.voltage", - translation_key="input_bypass_l1_n_voltage", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.bypass.L2-N.voltage": SensorEntityDescription( - key="input.bypass.L2-N.voltage", - translation_key="input_bypass_l2_n_voltage", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.bypass.L3-N.voltage": SensorEntityDescription( - key="input.bypass.L3-N.voltage", - translation_key="input_bypass_l3_n_voltage", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.current": SensorEntityDescription( - key="input.current", - translation_key="input_current", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - state_class=SensorStateClass.MEASUREMENT, - entity_registry_enabled_default=False, - ), - "input.current.status": SensorEntityDescription( - key="input.current.status", - translation_key="input_current_status", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.L1.current": SensorEntityDescription( - key="input.L1.current", - translation_key="input_l1_current", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.L2.current": SensorEntityDescription( - key="input.L2.current", - translation_key="input_l2_current", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.L3.current": SensorEntityDescription( - key="input.L3.current", - translation_key="input_l3_current", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.load": SensorEntityDescription( - key="input.load", - translation_key="input_load", - native_unit_of_measurement=PERCENTAGE, - state_class=SensorStateClass.MEASUREMENT, - ), - "input.phases": SensorEntityDescription( - key="input.phases", - translation_key="input_phases", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.power": SensorEntityDescription( - key="input.power", - translation_key="input_power", - 
device_class=SensorDeviceClass.APPARENT_POWER, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.realpower": SensorEntityDescription( - key="input.realpower", - translation_key="input_realpower", - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.L1.realpower": SensorEntityDescription( - key="input.L1.realpower", - translation_key="input_l1_realpower", - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.L2.realpower": SensorEntityDescription( - key="input.L2.realpower", - translation_key="input_l2_realpower", - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.L3.realpower": SensorEntityDescription( - key="input.L3.realpower", - translation_key="input_l3_realpower", - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "outlet.voltage": SensorEntityDescription( - key="outlet.voltage", - translation_key="outlet_voltage", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - state_class=SensorStateClass.MEASUREMENT, - ), - "output.power.nominal": SensorEntityDescription( - key="output.power.nominal", - translation_key="output_power_nominal", - native_unit_of_measurement=UnitOfApparentPower.VOLT_AMPERE, - device_class=SensorDeviceClass.APPARENT_POWER, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.L1.power.percent": SensorEntityDescription( - key="output.L1.power.percent", - translation_key="output_l1_power_percent", - native_unit_of_measurement=PERCENTAGE, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.L2.power.percent": SensorEntityDescription( - key="output.L2.power.percent", - translation_key="output_l2_power_percent", - native_unit_of_measurement=PERCENTAGE, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.L3.power.percent": SensorEntityDescription( - key="output.L3.power.percent", - translation_key="output_l3_power_percent", - native_unit_of_measurement=PERCENTAGE, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.current": SensorEntityDescription( - key="output.current", - translation_key="output_current", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.current.nominal": SensorEntityDescription( - key="output.current.nominal", - translation_key="output_current_nominal", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - entity_category=EntityCategory.DIAGNOSTIC, - 
entity_registry_enabled_default=False, - ), - "output.L1.current": SensorEntityDescription( - key="output.L1.current", - translation_key="output_l1_current", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.L2.current": SensorEntityDescription( - key="output.L2.current", - translation_key="output_l2_current", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.L3.current": SensorEntityDescription( - key="output.L3.current", - translation_key="output_l3_current", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.voltage": SensorEntityDescription( - key="output.voltage", - translation_key="output_voltage", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - state_class=SensorStateClass.MEASUREMENT, - ), - "output.voltage.nominal": SensorEntityDescription( - key="output.voltage.nominal", - translation_key="output_voltage_nominal", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.L1-N.voltage": SensorEntityDescription( - key="output.L1-N.voltage", - translation_key="output_l1_n_voltage", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.L2-N.voltage": SensorEntityDescription( - key="output.L2-N.voltage", - translation_key="output_l2_n_voltage", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.L3-N.voltage": SensorEntityDescription( - key="output.L3-N.voltage", - translation_key="output_l3_n_voltage", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.frequency": SensorEntityDescription( - key="output.frequency", - translation_key="output_frequency", - native_unit_of_measurement=UnitOfFrequency.HERTZ, - device_class=SensorDeviceClass.FREQUENCY, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.frequency.nominal": SensorEntityDescription( - key="output.frequency.nominal", - translation_key="output_frequency_nominal", - native_unit_of_measurement=UnitOfFrequency.HERTZ, - device_class=SensorDeviceClass.FREQUENCY, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.phases": SensorEntityDescription( - key="output.phases", - translation_key="output_phases", - entity_category=EntityCategory.DIAGNOSTIC, - 
entity_registry_enabled_default=False, - ), - "output.power": SensorEntityDescription( - key="output.power", - translation_key="output_power", - native_unit_of_measurement=UnitOfApparentPower.VOLT_AMPERE, - device_class=SensorDeviceClass.APPARENT_POWER, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.realpower": SensorEntityDescription( - key="output.realpower", - translation_key="output_realpower", - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.realpower.nominal": SensorEntityDescription( - key="output.realpower.nominal", - translation_key="output_realpower_nominal", - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.L1.realpower": SensorEntityDescription( - key="output.L1.realpower", - translation_key="output_l1_realpower", - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.L2.realpower": SensorEntityDescription( - key="output.L2.realpower", - translation_key="output_l2_realpower", - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.L3.realpower": SensorEntityDescription( - key="output.L3.realpower", - translation_key="output_l3_realpower", - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "ambient.humidity": SensorEntityDescription( - key="ambient.humidity", - translation_key="ambient_humidity", - native_unit_of_measurement=PERCENTAGE, - device_class=SensorDeviceClass.HUMIDITY, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - ), - "ambient.humidity.status": SensorEntityDescription( - key="ambient.humidity.status", - translation_key="ambient_humidity_status", - device_class=SensorDeviceClass.ENUM, - options=AMBIENT_THRESHOLD_STATUS_OPTIONS, - entity_category=EntityCategory.DIAGNOSTIC, - ), - "ambient.temperature": SensorEntityDescription( - key="ambient.temperature", - translation_key="ambient_temperature", - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - ), - "ambient.temperature.status": SensorEntityDescription( - key="ambient.temperature.status", - translation_key="ambient_temperature_status", - device_class=SensorDeviceClass.ENUM, - options=AMBIENT_THRESHOLD_STATUS_OPTIONS, - entity_category=EntityCategory.DIAGNOSTIC, - ), - "watts": SensorEntityDescription( - key="watts", - translation_key="watts", - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - state_class=SensorStateClass.MEASUREMENT, - ), } -def _get_nut_device_info(data: PyNUTData) -> DeviceInfo: - """Return a DeviceInfo object filled with NUT device info.""" - 
nut_dev_infos = asdict(data.device_info) - nut_infos = { - info_key: nut_dev_infos[nut_key] - for nut_key, info_key in NUT_DEV_INFO_TO_DEV_INFO.items() - if nut_dev_infos[nut_key] is not None - } - - return cast(DeviceInfo, nut_infos) - - async def async_setup_entry( hass: HomeAssistant, config_entry: NutConfigEntry, async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up the NUT sensors.""" + valid_sensor_types: dict[str, SensorEntityDescription] pynut_data = config_entry.runtime_data coordinator = pynut_data.coordinator @@ -1036,20 +1006,75 @@ async def async_setup_entry( unique_id = pynut_data.unique_id status = coordinator.data - resources = [sensor_id for sensor_id in SENSOR_TYPES if sensor_id in status] - # Display status is a special case that falls back to the status value - # of the UPS instead. - if KEY_STATUS in resources: - resources.append(KEY_STATUS_DISPLAY) + # Dynamically add outlet sensors to valid sensors dictionary + if (num_outlets := status.get("outlet.count")) is not None: + additional_sensor_types: dict[str, SensorEntityDescription] = {} + for outlet_num in range(1, int(num_outlets) + 1): + outlet_num_str: str = str(outlet_num) + outlet_name: str = ( + status.get(f"outlet.{outlet_num_str}.name") or outlet_num_str + ) + additional_sensor_types |= { + f"outlet.{outlet_num_str}.current": SensorEntityDescription( + key=f"outlet.{outlet_num_str}.current", + translation_key="outlet_number_current", + translation_placeholders={"outlet_name": outlet_name}, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + ), + f"outlet.{outlet_num_str}.current_status": SensorEntityDescription( + key=f"outlet.{outlet_num_str}.current_status", + translation_key="outlet_number_current_status", + translation_placeholders={"outlet_name": outlet_name}, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + f"outlet.{outlet_num_str}.desc": SensorEntityDescription( + key=f"outlet.{outlet_num_str}.desc", + translation_key="outlet_number_desc", + translation_placeholders={"outlet_name": outlet_name}, + ), + f"outlet.{outlet_num_str}.power": SensorEntityDescription( + key=f"outlet.{outlet_num_str}.power", + translation_key="outlet_number_power", + translation_placeholders={"outlet_name": outlet_name}, + native_unit_of_measurement=UnitOfApparentPower.VOLT_AMPERE, + device_class=SensorDeviceClass.APPARENT_POWER, + state_class=SensorStateClass.MEASUREMENT, + ), + f"outlet.{outlet_num_str}.realpower": SensorEntityDescription( + key=f"outlet.{outlet_num_str}.realpower", + translation_key="outlet_number_realpower", + translation_placeholders={"outlet_name": outlet_name}, + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + ), + } + + valid_sensor_types = {**SENSOR_TYPES, **additional_sensor_types} + else: + valid_sensor_types = SENSOR_TYPES # If device reports ambient sensors are not present, then remove - if status.get(AMBIENT_PRESENT) == "no": - resources = [item for item in resources if item not in AMBIENT_SENSORS] + has_ambient_sensors: bool = status.get(AMBIENT_PRESENT) != "no" + resources = [ + sensor_id + for sensor_id in valid_sensor_types + if sensor_id in status + and (has_ambient_sensors or sensor_id not in AMBIENT_SENSORS) + ] + + # Display status is a special case that falls back to the status value + # of the UPS instead. 
+ if KEY_STATUS in status: + resources.append(KEY_STATUS_DISPLAY) async_add_entities( NUTSensor( coordinator, - SENSOR_TYPES[sensor_type], + valid_sensor_types[sensor_type], data, unique_id, ) @@ -1057,33 +1082,12 @@ async def async_setup_entry( ) -class NUTSensor(CoordinatorEntity[DataUpdateCoordinator[dict[str, str]]], SensorEntity): +class NUTSensor(NUTBaseEntity, SensorEntity): """Representation of a sensor entity for NUT status values.""" - _attr_has_entity_name = True - - def __init__( - self, - coordinator: DataUpdateCoordinator[dict[str, str]], - sensor_description: SensorEntityDescription, - data: PyNUTData, - unique_id: str, - ) -> None: - """Initialize the sensor.""" - super().__init__(coordinator) - self.entity_description = sensor_description - - device_name = data.name.title() - self._attr_unique_id = f"{unique_id}_{sensor_description.key}" - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, unique_id)}, - name=device_name, - ) - self._attr_device_info.update(_get_nut_device_info(data)) - @property def native_value(self) -> str | None: - """Return entity state from ups.""" + """Return entity state from NUT device.""" status = self.coordinator.data if self.entity_description.key == KEY_STATUS_DISPLAY: return _format_display_state(status) diff --git a/homeassistant/components/nut/strings.json b/homeassistant/components/nut/strings.json index 08971732bc6..4d8ffd45475 100644 --- a/homeassistant/components/nut/strings.json +++ b/homeassistant/components/nut/strings.json @@ -29,8 +29,8 @@ }, "error": { "cannot_connect": "Connection error: {error}", - "unknown": "[%key:common::config_flow::error::unknown%]", - "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]" + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", @@ -74,14 +74,13 @@ "test_failure_stop": "Stop simulating a power failure", "test_panel_start": "Start testing the UPS panel", "test_panel_stop": "Stop a UPS panel test", - "test_system_start": "Start a system test", - "outlet_1_load_on": "Power outlet 1 on", - "outlet_1_load_off": "Power outlet 1 off", - "outlet_2_load_on": "Power outlet 2 on", - "outlet_2_load_off": "Power outlet 2 off" + "test_system_start": "Start a system test" } }, "entity": { + "button": { + "outlet_number_load_cycle": { "name": "Power cycle outlet {outlet_name}" } + }, "sensor": { "ambient_humidity": { "name": "Ambient humidity" }, "ambient_humidity_status": { "name": "Ambient humidity status" }, @@ -110,43 +109,40 @@ "battery_voltage_low": { "name": "Low battery voltage" }, "battery_voltage_nominal": { "name": "Nominal battery voltage" }, "input_bypass_current": { "name": "Input bypass current" }, - "input_bypass_l1_current": { "name": "Input bypass L1 current" }, - "input_bypass_l2_current": { "name": "Input bypass L2 current" }, - "input_bypass_l3_current": { "name": "Input bypass L3 current" }, - "input_bypass_voltage": { "name": "Input bypass voltage" }, - "input_bypass_l1_n_voltage": { "name": "Input bypass L1-N voltage" }, - "input_bypass_l2_n_voltage": { "name": "Input bypass L2-N voltage" }, - "input_bypass_l3_n_voltage": { "name": "Input bypass L3-N voltage" }, "input_bypass_frequency": { "name": "Input bypass frequency" }, + "input_bypass_l1_current": { "name": "Input bypass L1 current" }, + "input_bypass_l1_n_voltage": { "name": "Input bypass L1-N voltage" }, + 
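The sensor setup rewrite above builds per-outlet sensor descriptions from the reported outlet.count and then filters everything against the keys the device actually exposes. A minimal sketch of that selection logic, assuming AMBIENT_PRESENT refers to the "ambient.present" status key and using stand-in constants in place of the real SENSOR_TYPES and AMBIENT_SENSORS:

    # Stand-ins for the real constants in nut/sensor.py.
    SENSOR_TYPES = {"ups.load": ..., "ups.status": ..., "ambient.humidity": ...}
    AMBIENT_SENSORS = {"ambient.humidity"}
    KEY_STATUS, KEY_STATUS_DISPLAY = "ups.status", "ups.status.display"


    def select_resources(status: dict[str, str]) -> list[str]:
        """Return the sensor keys to create for a device reporting `status`."""
        valid = dict(SENSOR_TYPES)
        # One set of descriptions per reported outlet, keyed outlet.<n>.<metric>.
        for n in range(1, int(status.get("outlet.count", 0)) + 1):
            name = status.get(f"outlet.{n}.name") or str(n)
            for metric in ("current", "current_status", "desc", "power", "realpower"):
                valid[f"outlet.{n}.{metric}"] = f"description for outlet {name}"
        has_ambient = status.get("ambient.present") != "no"
        resources = [
            key
            for key in valid
            if key in status and (has_ambient or key not in AMBIENT_SENSORS)
        ]
        # Display status is synthesised from ups.status, so add it whenever that exists.
        if KEY_STATUS in status:
            resources.append(KEY_STATUS_DISPLAY)
        return resources


    print(select_resources({
        "ups.status": "OL",
        "outlet.count": "1",
        "outlet.1.name": "Outlet A",
        "outlet.1.current": "0.4",
    }))
    # ['ups.status', 'outlet.1.current', 'ups.status.display']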
"input_bypass_l1_realpower": { "name": "Input bypass L1 real power" }, + "input_bypass_l2_current": { "name": "Input bypass L2 current" }, + "input_bypass_l2_n_voltage": { "name": "Input bypass L2-N voltage" }, + "input_bypass_l2_realpower": { "name": "Input bypass L2 real power" }, + "input_bypass_l3_current": { "name": "Input bypass L3 current" }, + "input_bypass_l3_n_voltage": { "name": "Input bypass L3-N voltage" }, + "input_bypass_l3_realpower": { "name": "Input bypass L3 real power" }, "input_bypass_phases": { "name": "Input bypass phases" }, "input_bypass_realpower": { "name": "Input bypass real power" }, - "input_bypass_l1_realpower": { - "name": "Input bypass L1 real power" - }, - "input_bypass_l2_realpower": { - "name": "Input bypass L2 real power" - }, - "input_bypass_l3_realpower": { - "name": "Input bypass L3 real power" - }, + "input_bypass_voltage": { "name": "Input bypass voltage" }, "input_current": { "name": "Input current" }, "input_current_status": { "name": "Input current status" }, - "input_l1_current": { "name": "Input L1 current" }, - "input_l2_current": { "name": "Input L2 current" }, - "input_l3_current": { "name": "Input L3 current" }, "input_frequency": { "name": "Input frequency" }, "input_frequency_nominal": { "name": "Input nominal frequency" }, "input_frequency_status": { "name": "Input frequency status" }, + "input_l1_current": { "name": "Input L1 current" }, "input_l1_frequency": { "name": "Input L1 line frequency" }, + "input_l1_n_voltage": { "name": "Input L1 voltage" }, + "input_l1_realpower": { "name": "Input L1 real power" }, + "input_l2_current": { "name": "Input L2 current" }, "input_l2_frequency": { "name": "Input L2 line frequency" }, + "input_l2_n_voltage": { "name": "Input L2 voltage" }, + "input_l2_realpower": { "name": "Input L2 real power" }, + "input_l3_current": { "name": "Input L3 current" }, "input_l3_frequency": { "name": "Input L3 line frequency" }, + "input_l3_n_voltage": { "name": "Input L3 voltage" }, + "input_l3_realpower": { "name": "Input L3 real power" }, + "input_load": { "name": "Input load" }, "input_phases": { "name": "Input phases" }, "input_power": { "name": "Input power" }, "input_realpower": { "name": "Input real power" }, - "input_l1_realpower": { "name": "Input L1 real power" }, - "input_l2_realpower": { "name": "Input L2 real power" }, - "input_l3_realpower": { "name": "Input L3 real power" }, - "input_load": { "name": "Input load" }, "input_sensitivity": { "name": "Input power sensitivity" }, "input_transfer_high": { "name": "High voltage transfer" }, "input_transfer_low": { "name": "Low voltage transfer" }, @@ -154,33 +150,37 @@ "input_voltage": { "name": "Input voltage" }, "input_voltage_nominal": { "name": "Nominal input voltage" }, "input_voltage_status": { "name": "Input voltage status" }, - "input_l1_n_voltage": { "name": "Input L1 voltage" }, - "input_l2_n_voltage": { "name": "Input L2 voltage" }, - "input_l3_n_voltage": { "name": "Input L3 voltage" }, + "outlet_number_current": { "name": "Outlet {outlet_name} current" }, + "outlet_number_current_status": { + "name": "Outlet {outlet_name} current status" + }, + "outlet_number_desc": { "name": "Outlet {outlet_name} description" }, + "outlet_number_power": { "name": "Outlet {outlet_name} power" }, + "outlet_number_realpower": { "name": "Outlet {outlet_name} real power" }, "outlet_voltage": { "name": "Outlet voltage" }, "output_current": { "name": "Output current" }, "output_current_nominal": { "name": "Nominal output current" }, - "output_l1_current": { "name": 
"Output L1 current" }, - "output_l2_current": { "name": "Output L2 current" }, - "output_l3_current": { "name": "Output L3 current" }, "output_frequency": { "name": "Output frequency" }, "output_frequency_nominal": { "name": "Nominal output frequency" }, + "output_l1_current": { "name": "Output L1 current" }, + "output_l1_n_voltage": { "name": "Output L1-N voltage" }, + "output_l1_power_percent": { "name": "Output L1 power usage" }, + "output_l1_realpower": { "name": "Output L1 real power" }, + "output_l2_current": { "name": "Output L2 current" }, + "output_l2_n_voltage": { "name": "Output L2-N voltage" }, + "output_l2_power_percent": { "name": "Output L2 power usage" }, + "output_l2_realpower": { "name": "Output L2 real power" }, + "output_l3_current": { "name": "Output L3 current" }, + "output_l3_n_voltage": { "name": "Output L3-N voltage" }, + "output_l3_power_percent": { "name": "Output L3 power usage" }, + "output_l3_realpower": { "name": "Output L3 real power" }, "output_phases": { "name": "Output phases" }, "output_power": { "name": "Output apparent power" }, - "output_l2_power_percent": { "name": "Output L2 power usage" }, - "output_l1_power_percent": { "name": "Output L1 power usage" }, - "output_l3_power_percent": { "name": "Output L3 power usage" }, "output_power_nominal": { "name": "Nominal output power" }, "output_realpower": { "name": "Output real power" }, "output_realpower_nominal": { "name": "Nominal output real power" }, - "output_l1_realpower": { "name": "Output L1 real power" }, - "output_l2_realpower": { "name": "Output L2 real power" }, - "output_l3_realpower": { "name": "Output L3 real power" }, "output_voltage": { "name": "Output voltage" }, "output_voltage_nominal": { "name": "Nominal output voltage" }, - "output_l1_n_voltage": { "name": "Output L1-N voltage" }, - "output_l2_n_voltage": { "name": "Output L2-N voltage" }, - "output_l3_n_voltage": { "name": "Output L3-N voltage" }, "ups_alarm": { "name": "Alarms" }, "ups_beeper_status": { "name": "Beeper status" }, "ups_contacts": { "name": "External contacts" }, @@ -212,8 +212,10 @@ "ups_timer_shutdown": { "name": "Load shutdown timer" }, "ups_timer_start": { "name": "Load start timer" }, "ups_type": { "name": "UPS type" }, - "ups_watchdog_status": { "name": "Watchdog status" }, - "watts": { "name": "Watts" } + "ups_watchdog_status": { "name": "Watchdog status" } + }, + "switch": { + "outlet_number_load_poweronoff": { "name": "Power outlet {outlet_name}" } } } } diff --git a/homeassistant/components/nut/switch.py b/homeassistant/components/nut/switch.py new file mode 100644 index 00000000000..924a596cc8e --- /dev/null +++ b/homeassistant/components/nut/switch.py @@ -0,0 +1,90 @@ +"""Provides a switch for switchable NUT outlets.""" + +from __future__ import annotations + +import logging +from typing import Any + +from homeassistant.components.switch import ( + SwitchDeviceClass, + SwitchEntity, + SwitchEntityDescription, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . 
import NutConfigEntry +from .entity import NUTBaseEntity + +_LOGGER = logging.getLogger(__name__) + +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: NutConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up the NUT switches.""" + pynut_data = config_entry.runtime_data + coordinator = pynut_data.coordinator + status = coordinator.data + + # Dynamically add outlet switch types + if (num_outlets := status.get("outlet.count")) is None: + return + + data = pynut_data.data + unique_id = pynut_data.unique_id + user_available_commands = pynut_data.user_available_commands + switch_descriptions = [ + SwitchEntityDescription( + key=f"outlet.{outlet_num!s}.load.poweronoff", + translation_key="outlet_number_load_poweronoff", + translation_placeholders={ + "outlet_name": status.get(f"outlet.{outlet_num!s}.name") + or str(outlet_num) + }, + device_class=SwitchDeviceClass.OUTLET, + entity_registry_enabled_default=True, + ) + for outlet_num in range(1, int(num_outlets) + 1) + if ( + status.get(f"outlet.{outlet_num!s}.switchable") == "yes" + and f"outlet.{outlet_num!s}.load.on" in user_available_commands + and f"outlet.{outlet_num!s}.load.off" in user_available_commands + ) + ] + + async_add_entities( + NUTSwitch(coordinator, description, data, unique_id) + for description in switch_descriptions + ) + + +class NUTSwitch(NUTBaseEntity, SwitchEntity): + """Representation of a switch entity for NUT status values.""" + + @property + def is_on(self) -> bool | None: + """Return the state of the switch.""" + status = self.coordinator.data + outlet, outlet_num_str = self.entity_description.key.split(".", 2)[:2] + if (state := status.get(f"{outlet}.{outlet_num_str}.status")) is None: + return None + return bool(state == "on") + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn on the device.""" + + outlet, outlet_num_str = self.entity_description.key.split(".", 2)[:2] + command_name = f"{outlet}.{outlet_num_str}.load.on" + await self.pynut_data.async_run_command(command_name) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off the device.""" + + outlet, outlet_num_str = self.entity_description.key.split(".", 2)[:2] + command_name = f"{outlet}.{outlet_num_str}.load.off" + await self.pynut_data.async_run_command(command_name) diff --git a/homeassistant/components/nws/sensor.py b/homeassistant/components/nws/sensor.py index 4cfb3b85e0f..8a7631d8381 100644 --- a/homeassistant/components/nws/sensor.py +++ b/homeassistant/components/nws/sensor.py @@ -115,6 +115,7 @@ SENSOR_TYPES: tuple[NWSSensorEntityDescription, ...] 
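In the new NUTSwitch above, the outlet a switch controls is recovered from its own description key; the same prefix then drives both the status lookup and the load commands. A short illustration of that split in plain Python (the key below is a hypothetical example):

    # The description key encodes the outlet: "outlet.<n>.load.poweronoff".
    key = "outlet.2.load.poweronoff"             # hypothetical description key
    outlet, outlet_num = key.split(".", 2)[:2]   # -> ("outlet", "2")

    status_key = f"{outlet}.{outlet_num}.status"      # read to answer is_on
    on_command = f"{outlet}.{outlet_num}.load.on"     # sent by async_turn_on
    off_command = f"{outlet}.{outlet_num}.load.off"   # sent by async_turn_off

    assert (status_key, on_command, off_command) == (
        "outlet.2.status",
        "outlet.2.load.on",
        "outlet.2.load.off",
    )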
= ( native_unit_of_measurement=DEGREE, unit_convert=DEGREE, device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, ), NWSSensorEntityDescription( key="barometricPressure", diff --git a/homeassistant/components/octoprint/config_flow.py b/homeassistant/components/octoprint/config_flow.py index 010b45e5a1c..e20eea0a61f 100644 --- a/homeassistant/components/octoprint/config_flow.py +++ b/homeassistant/components/octoprint/config_flow.py @@ -85,7 +85,8 @@ class OctoPrintConfigFlow(ConfigFlow, domain=DOMAIN): raise err from None except CannotConnect: errors["base"] = "cannot_connect" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" if errors: diff --git a/homeassistant/components/ohme/config_flow.py b/homeassistant/components/ohme/config_flow.py index 748ea558983..1037c3a7c8b 100644 --- a/homeassistant/components/ohme/config_flow.py +++ b/homeassistant/components/ohme/config_flow.py @@ -99,6 +99,29 @@ class OhmeConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle re-configuration.""" + errors: dict[str, str] = {} + reconfigure_entry = self._get_reconfigure_entry() + if user_input: + errors = await self._validate_account( + reconfigure_entry.data[CONF_EMAIL], + user_input[CONF_PASSWORD], + ) + if not errors: + return self.async_update_reload_and_abort( + reconfigure_entry, + data_updates=user_input, + ) + return self.async_show_form( + step_id="reconfigure", + data_schema=REAUTH_SCHEMA, + description_placeholders={"email": reconfigure_entry.data[CONF_EMAIL]}, + errors=errors, + ) + async def _validate_account(self, email: str, password: str) -> dict[str, str]: """Validate Ohme account and return dict of errors.""" errors: dict[str, str] = {} diff --git a/homeassistant/components/ohme/diagnostics.py b/homeassistant/components/ohme/diagnostics.py new file mode 100644 index 00000000000..a955b3b76e2 --- /dev/null +++ b/homeassistant/components/ohme/diagnostics.py @@ -0,0 +1,24 @@ +"""Provides diagnostics for Ohme.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.core import HomeAssistant + +from .coordinator import OhmeConfigEntry + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, config_entry: OhmeConfigEntry +) -> dict[str, Any]: + """Return diagnostics for Ohme.""" + coordinators = config_entry.runtime_data + client = coordinators.charge_session_coordinator.client + + return { + "device_info": client.device_info, + "vehicles": client.vehicles, + "ct_connected": client.ct_connected, + "cap_available": client.cap_available, + } diff --git a/homeassistant/components/ohme/icons.json b/homeassistant/components/ohme/icons.json index 0e4d58a5294..8613f2542c4 100644 --- a/homeassistant/components/ohme/icons.json +++ b/homeassistant/components/ohme/icons.json @@ -54,6 +54,9 @@ "state": { "off": "mdi:sleep-off" } + }, + "price_cap": { + "default": "mdi:car-speed-limiter" } }, "time": { @@ -65,6 +68,9 @@ "services": { "list_charge_slots": { "service": "mdi:clock-start" + }, + "set_price_cap": { + "service": "mdi:car-speed-limiter" } } } diff --git a/homeassistant/components/ohme/manifest.json b/homeassistant/components/ohme/manifest.json index f31af213387..f0021808d92 100644 --- a/homeassistant/components/ohme/manifest.json +++ b/homeassistant/components/ohme/manifest.json @@ -7,5 +7,5 @@ 
"integration_type": "device", "iot_class": "cloud_polling", "quality_scale": "silver", - "requirements": ["ohme==1.4.0"] + "requirements": ["ohme==1.4.1"] } diff --git a/homeassistant/components/ohme/quality_scale.yaml b/homeassistant/components/ohme/quality_scale.yaml index 497d5ad32e5..f748cf339b4 100644 --- a/homeassistant/components/ohme/quality_scale.yaml +++ b/homeassistant/components/ohme/quality_scale.yaml @@ -39,7 +39,7 @@ rules: # Gold devices: done - diagnostics: todo + diagnostics: done discovery: status: exempt comment: | @@ -62,7 +62,7 @@ rules: entity-translations: done exception-translations: done icon-translations: done - reconfiguration-flow: todo + reconfiguration-flow: done repair-issues: status: exempt comment: | diff --git a/homeassistant/components/ohme/services.py b/homeassistant/components/ohme/services.py index 7d06b909d88..be044f01740 100644 --- a/homeassistant/components/ohme/services.py +++ b/homeassistant/components/ohme/services.py @@ -17,9 +17,11 @@ from homeassistant.helpers import selector from .const import DOMAIN -SERVICE_LIST_CHARGE_SLOTS = "list_charge_slots" ATTR_CONFIG_ENTRY: Final = "config_entry" -SERVICE_SCHEMA: Final = vol.Schema( +ATTR_PRICE_CAP: Final = "price_cap" + +SERVICE_LIST_CHARGE_SLOTS = "list_charge_slots" +SERVICE_LIST_CHARGE_SLOTS_SCHEMA: Final = vol.Schema( { vol.Required(ATTR_CONFIG_ENTRY): selector.ConfigEntrySelector( { @@ -29,6 +31,18 @@ SERVICE_SCHEMA: Final = vol.Schema( } ) +SERVICE_SET_PRICE_CAP = "set_price_cap" +SERVICE_SET_PRICE_CAP_SCHEMA: Final = vol.Schema( + { + vol.Required(ATTR_CONFIG_ENTRY): selector.ConfigEntrySelector( + { + "integration": DOMAIN, + } + ), + vol.Required(ATTR_PRICE_CAP): vol.Coerce(float), + } +) + def __get_client(call: ServiceCall) -> OhmeApiClient: """Get the client from the config entry.""" @@ -66,10 +80,26 @@ def async_setup_services(hass: HomeAssistant) -> None: return {"slots": client.slots} + async def set_price_cap( + service_call: ServiceCall, + ) -> None: + """List of charge slots.""" + client = __get_client(service_call) + price_cap = service_call.data[ATTR_PRICE_CAP] + await client.async_change_price_cap(cap=price_cap) + hass.services.async_register( DOMAIN, SERVICE_LIST_CHARGE_SLOTS, list_charge_slots, - schema=SERVICE_SCHEMA, + schema=SERVICE_LIST_CHARGE_SLOTS_SCHEMA, supports_response=SupportsResponse.ONLY, ) + + hass.services.async_register( + DOMAIN, + SERVICE_SET_PRICE_CAP, + set_price_cap, + schema=SERVICE_SET_PRICE_CAP_SCHEMA, + supports_response=SupportsResponse.NONE, + ) diff --git a/homeassistant/components/ohme/services.yaml b/homeassistant/components/ohme/services.yaml index c5c8ee18138..a45bc131511 100644 --- a/homeassistant/components/ohme/services.yaml +++ b/homeassistant/components/ohme/services.yaml @@ -5,3 +5,16 @@ list_charge_slots: selector: config_entry: integration: ohme +set_price_cap: + fields: + config_entry: + required: true + selector: + config_entry: + integration: ohme + price_cap: + required: true + selector: + number: + min: 0 + mode: box diff --git a/homeassistant/components/ohme/strings.json b/homeassistant/components/ohme/strings.json index 187e825c159..4a2170babeb 100644 --- a/homeassistant/components/ohme/strings.json +++ b/homeassistant/components/ohme/strings.json @@ -21,6 +21,16 @@ "data_description": { "password": "Enter the password for your Ohme account" } + }, + "reconfigure": { + "description": "Update your password for {email}", + "title": "Reconfigure Ohme Account", + "data": { + "password": "[%key:common::config_flow::data::password%]" 
+ }, + "data_description": { + "password": "Enter the password for your Ohme account" + } } }, "error": { @@ -29,7 +39,8 @@ }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" } }, "services": { @@ -42,6 +53,20 @@ "description": "The Ohme config entry for which to return charge slots." } } + }, + "set_price_cap": { + "name": "Set price cap", + "description": "Prevents charging when the electricity price exceeds a defined threshold.", + "fields": { + "config_entry": { + "name": "Ohme account", + "description": "The Ohme config entry for which to return charge slots." + }, + "price_cap": { + "name": "Price cap", + "description": "Threshold in 1/100ths of your local currency." + } + } } }, "entity": { @@ -102,6 +127,9 @@ }, "sleep_when_inactive": { "name": "Sleep when inactive" + }, + "price_cap": { + "name": "Price cap" } }, "time": { diff --git a/homeassistant/components/ohme/switch.py b/homeassistant/components/ohme/switch.py index c4465ec7e97..47e3bf8a99d 100644 --- a/homeassistant/components/ohme/switch.py +++ b/homeassistant/components/ohme/switch.py @@ -1,9 +1,10 @@ """Platform for switch.""" +from collections.abc import Awaitable, Callable from dataclasses import dataclass from typing import Any -from ohme import ApiException +from ohme import ApiException, OhmeApiClient from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription from homeassistant.const import EntityCategory @@ -19,28 +20,37 @@ PARALLEL_UPDATES = 1 @dataclass(frozen=True, kw_only=True) -class OhmeSwitchDescription(OhmeEntityDescription, SwitchEntityDescription): - """Class describing Ohme switch entities.""" +class OhmeConfigSwitchDescription(OhmeEntityDescription, SwitchEntityDescription): + """Class describing Ohme configuration switch entities.""" configuration_key: str -SWITCH_DEVICE_INFO = [ - OhmeSwitchDescription( +@dataclass(frozen=True, kw_only=True) +class OhmeSwitchDescription(OhmeEntityDescription, SwitchEntityDescription): + """Class describing basic Ohme switch entities.""" + + is_on_fn: Callable[[OhmeApiClient], bool] + off_fn: Callable[[OhmeApiClient], Awaitable] + on_fn: Callable[[OhmeApiClient], Awaitable] + + +SWITCH_CONFIG = [ + OhmeConfigSwitchDescription( key="lock_buttons", translation_key="lock_buttons", entity_category=EntityCategory.CONFIG, is_supported_fn=lambda client: client.is_capable("buttonsLockable"), configuration_key="buttonsLocked", ), - OhmeSwitchDescription( + OhmeConfigSwitchDescription( key="require_approval", translation_key="require_approval", entity_category=EntityCategory.CONFIG, is_supported_fn=lambda client: client.is_capable("pluginsRequireApprovalMode"), configuration_key="pluginsRequireApproval", ), - OhmeSwitchDescription( + OhmeConfigSwitchDescription( key="sleep_when_inactive", translation_key="sleep_when_inactive", entity_category=EntityCategory.CONFIG, @@ -49,6 +59,17 @@ SWITCH_DEVICE_INFO = [ ), ] +SWITCH_DESCRIPTION = [ + OhmeSwitchDescription( + key="price_cap", + translation_key="price_cap", + is_supported_fn=lambda client: client.cap_available, + is_on_fn=lambda client: client.cap_enabled, + on_fn=lambda client: client.async_change_price_cap(True), + off_fn=lambda client: client.async_change_price_cap(False), + ), +] + async def 
async_setup_entry( hass: HomeAssistant, @@ -56,15 +77,17 @@ async def async_setup_entry( async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up switches.""" - coordinators = config_entry.runtime_data - coordinator_map = [ - (SWITCH_DEVICE_INFO, coordinators.device_info_coordinator), - ] + coordinator = config_entry.runtime_data.device_info_coordinator + + async_add_entities( + OhmeConfigSwitch(coordinator, description) + for description in SWITCH_CONFIG + if description.is_supported_fn(coordinator.client) + ) async_add_entities( OhmeSwitch(coordinator, description) - for entities, coordinator in coordinator_map - for description in entities + for description in SWITCH_DESCRIPTION if description.is_supported_fn(coordinator.client) ) @@ -74,6 +97,27 @@ class OhmeSwitch(OhmeEntity, SwitchEntity): entity_description: OhmeSwitchDescription + @property + def is_on(self) -> bool: + """Return True if entity is on.""" + return self.entity_description.is_on_fn(self.coordinator.client) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the switch off.""" + await self.entity_description.off_fn(self.coordinator.client) + await self.coordinator.async_request_refresh() + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the switch on.""" + await self.entity_description.on_fn(self.coordinator.client) + await self.coordinator.async_request_refresh() + + +class OhmeConfigSwitch(OhmeEntity, SwitchEntity): + """Configuration switch for Ohme.""" + + entity_description: OhmeConfigSwitchDescription + @property def is_on(self) -> bool: """Return the entity value to represent the entity state.""" diff --git a/homeassistant/components/ollama/conversation.py b/homeassistant/components/ollama/conversation.py index 85daf742035..ab9e05b5fbe 100644 --- a/homeassistant/components/ollama/conversation.py +++ b/homeassistant/components/ollama/conversation.py @@ -15,7 +15,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import chat_session, intent, llm +from homeassistant.helpers import intent, llm from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from .const import ( @@ -206,18 +206,6 @@ class OllamaConversationEntity( """Return a list of supported languages.""" return MATCH_ALL - async def async_process( - self, user_input: conversation.ConversationInput - ) -> conversation.ConversationResult: - """Process a sentence.""" - with ( - chat_session.async_get_chat_session( - self.hass, user_input.conversation_id - ) as session, - conversation.async_get_chat_log(self.hass, session, user_input) as chat_log, - ): - return await self._async_handle_message(user_input, chat_log) - async def _async_handle_message( self, user_input: conversation.ConversationInput, diff --git a/homeassistant/components/omnilogic/strings.json b/homeassistant/components/omnilogic/strings.json index 5b193b7f5ba..6f207337789 100644 --- a/homeassistant/components/omnilogic/strings.json +++ b/homeassistant/components/omnilogic/strings.json @@ -34,7 +34,7 @@ "fields": { "speed": { "name": "Speed", - "description": "Speed for the VSP between min and max speed." + "description": "Speed for the pump between min and max speed." 
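The Ohme switch split above moves the non-configuration switches onto callable hooks (is_on_fn / on_fn / off_fn), so the entity class itself no longer knows anything about price caps. A reduced sketch of that pattern, with FakeClient standing in for the real OhmeApiClient:

    import asyncio
    from collections.abc import Awaitable, Callable
    from dataclasses import dataclass


    class FakeClient:
        """Stand-in for OhmeApiClient with just the price-cap surface."""

        cap_enabled = False

        async def async_change_price_cap(self, enabled: bool) -> None:
            self.cap_enabled = enabled


    @dataclass(frozen=True, kw_only=True)
    class SwitchDescription:
        key: str
        is_on_fn: Callable[[FakeClient], bool]
        on_fn: Callable[[FakeClient], Awaitable[None]]
        off_fn: Callable[[FakeClient], Awaitable[None]]


    PRICE_CAP = SwitchDescription(
        key="price_cap",
        is_on_fn=lambda client: client.cap_enabled,
        on_fn=lambda client: client.async_change_price_cap(True),
        off_fn=lambda client: client.async_change_price_cap(False),
    )


    async def demo() -> None:
        client = FakeClient()
        await PRICE_CAP.on_fn(client)    # the generic entity only calls the hooks
        assert PRICE_CAP.is_on_fn(client)
        await PRICE_CAP.off_fn(client)
        assert not PRICE_CAP.is_on_fn(client)


    asyncio.run(demo())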
} } } diff --git a/homeassistant/components/onboarding/views.py b/homeassistant/components/onboarding/views.py index a590588c009..f0638e72d94 100644 --- a/homeassistant/components/onboarding/views.py +++ b/homeassistant/components/onboarding/views.py @@ -60,6 +60,7 @@ async def async_setup( hass.http.register_view(BackupInfoView(data)) hass.http.register_view(RestoreBackupView(data)) hass.http.register_view(UploadBackupView(data)) + setup_cloud_views(hass, data) class OnboardingView(HomeAssistantView): @@ -367,7 +368,7 @@ class BackupInfoView(BackupOnboardingView): { "backups": list(backups.values()), "state": manager.state, - "last_non_idle_event": manager.last_non_idle_event, + "last_action_event": manager.last_action_event, } ) @@ -429,6 +430,115 @@ class UploadBackupView(BackupOnboardingView, backup_http.UploadBackupView): return await self._post(request) +def setup_cloud_views(hass: HomeAssistant, data: OnboardingStoreData) -> None: + """Set up the cloud views.""" + + # The cloud integration is imported locally to avoid cloud being imported by + # bootstrap.py and to avoid circular imports. + + # pylint: disable-next=import-outside-toplevel + from homeassistant.components.cloud import http_api as cloud_http + + # pylint: disable-next=import-outside-toplevel,hass-component-root-import + from homeassistant.components.cloud.const import DATA_CLOUD + + class CloudOnboardingView(HomeAssistantView): + """Cloud onboarding view.""" + + requires_auth = False + + def __init__(self, data: OnboardingStoreData) -> None: + """Initialize the view.""" + self._data = data + + def with_cloud[_ViewT: CloudOnboardingView, **_P]( + func: Callable[ + Concatenate[_ViewT, web.Request, _P], + Coroutine[Any, Any, web.Response], + ], + ) -> Callable[ + Concatenate[_ViewT, web.Request, _P], Coroutine[Any, Any, web.Response] + ]: + """Home Assistant API decorator to check onboarding and cloud.""" + + @wraps(func) + async def _with_cloud( + self: _ViewT, + request: web.Request, + *args: _P.args, + **kwargs: _P.kwargs, + ) -> web.Response: + """Check onboarding status, cloud and call function.""" + if self._data["done"]: + # If at least one onboarding step is done, we don't allow accessing + # the cloud onboarding views. 
+ raise HTTPUnauthorized + + hass = request.app[KEY_HASS] + if DATA_CLOUD not in hass.data: + return self.json( + {"code": "cloud_disabled"}, + status_code=HTTPStatus.INTERNAL_SERVER_ERROR, + ) + + return await func(self, request, *args, **kwargs) + + return _with_cloud + + class CloudForgotPasswordView( + CloudOnboardingView, cloud_http.CloudForgotPasswordView + ): + """View to start Forgot Password flow.""" + + url = "/api/onboarding/cloud/forgot_password" + name = "api:onboarding:cloud:forgot_password" + + @with_cloud + async def post(self, request: web.Request) -> web.Response: + """Handle forgot password request.""" + return await super()._post(request) + + class CloudLoginView(CloudOnboardingView, cloud_http.CloudLoginView): + """Login to Home Assistant Cloud.""" + + url = "/api/onboarding/cloud/login" + name = "api:onboarding:cloud:login" + + @with_cloud + async def post(self, request: web.Request) -> web.Response: + """Handle login request.""" + return await super()._post(request) + + class CloudLogoutView(CloudOnboardingView, cloud_http.CloudLogoutView): + """Log out of the Home Assistant cloud.""" + + url = "/api/onboarding/cloud/logout" + name = "api:onboarding:cloud:logout" + + @with_cloud + async def post(self, request: web.Request) -> web.Response: + """Handle logout request.""" + return await super()._post(request) + + class CloudStatusView(CloudOnboardingView): + """Get cloud status view.""" + + url = "/api/onboarding/cloud/status" + name = "api:onboarding:cloud:status" + + @with_cloud + async def get(self, request: web.Request) -> web.Response: + """Return cloud status.""" + hass = request.app[KEY_HASS] + cloud = hass.data[DATA_CLOUD] + return self.json({"logged_in": cloud.is_logged_in}) + + hass.http.register_view(CloudForgotPasswordView(data)) + hass.http.register_view(CloudLoginView(data)) + hass.http.register_view(CloudLogoutView(data)) + hass.http.register_view(CloudStatusView(data)) + + @callback def _async_get_hass_provider(hass: HomeAssistant) -> HassAuthProvider: """Get the Home Assistant auth provider.""" diff --git a/homeassistant/components/onedrive/__init__.py b/homeassistant/components/onedrive/__init__.py index 17dead653f0..f5d841683d5 100644 --- a/homeassistant/components/onedrive/__init__.py +++ b/homeassistant/components/onedrive/__init__.py @@ -106,11 +106,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: OneDriveConfigEntry) -> await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - async def update_listener(hass: HomeAssistant, entry: OneDriveConfigEntry) -> None: - await hass.config_entries.async_reload(entry.entry_id) - - entry.async_on_unload(entry.add_update_listener(update_listener)) - def async_notify_backup_listeners() -> None: for listener in hass.data.get(DATA_BACKUP_AGENT_LISTENERS, []): listener() diff --git a/homeassistant/components/onewire/config_flow.py b/homeassistant/components/onewire/config_flow.py index 8a5623772f7..2099d9aabb5 100644 --- a/homeassistant/components/onewire/config_flow.py +++ b/homeassistant/components/onewire/config_flow.py @@ -234,12 +234,7 @@ class OnewireOptionsFlowHandler(OptionsFlow): INPUT_ENTRY_DEVICE_SELECTION, default=self._get_current_configured_sensors(), description="Multiselect with list of devices to choose from", - ): cv.multi_select( - { - friendly_name: False - for friendly_name in self.configurable_devices - } - ), + ): cv.multi_select(dict.fromkeys(self.configurable_devices, False)), } ), errors=errors, diff --git a/homeassistant/components/onewire/onewirehub.py 
b/homeassistant/components/onewire/onewirehub.py index d65d7a90950..dc894a4242e 100644 --- a/homeassistant/components/onewire/onewirehub.py +++ b/homeassistant/components/onewire/onewirehub.py @@ -2,6 +2,7 @@ from __future__ import annotations +import contextlib from datetime import datetime, timedelta import logging import os @@ -58,7 +59,7 @@ class OneWireHub: owproxy: protocol._Proxy devices: list[OWDeviceDescription] - _version: str + _version: str | None = None def __init__(self, hass: HomeAssistant, config_entry: OneWireConfigEntry) -> None: """Initialize.""" @@ -74,7 +75,9 @@ class OneWireHub: port = self._config_entry.data[CONF_PORT] _LOGGER.debug("Initializing connection to %s:%s", host, port) self.owproxy = protocol.proxy(host, port) - self._version = self.owproxy.read(protocol.PTH_VERSION).decode() + with contextlib.suppress(protocol.OwnetError): + # Version is not available on all servers + self._version = self.owproxy.read(protocol.PTH_VERSION).decode() self.devices = _discover_devices(self.owproxy) async def initialize(self) -> None: diff --git a/homeassistant/components/onewire/strings.json b/homeassistant/components/onewire/strings.json index 46f41503d97..5e7719673b1 100644 --- a/homeassistant/components/onewire/strings.json +++ b/homeassistant/components/onewire/strings.json @@ -140,14 +140,14 @@ "device_selection": "[%key:component::onewire::options::error::device_not_selected%]" }, "description": "Select what configuration steps to process", - "title": "OneWire Device Options" + "title": "1-Wire device options" }, "configure_device": { "data": { - "precision": "Sensor Precision" + "precision": "Sensor precision" }, "description": "Select sensor precision for {sensor_id}", - "title": "OneWire Sensor Precision" + "title": "1-Wire sensor precision" } } } diff --git a/homeassistant/components/onvif/__init__.py b/homeassistant/components/onvif/__init__.py index 02e7e28ea18..09a4aba52bf 100644 --- a/homeassistant/components/onvif/__init__.py +++ b/homeassistant/components/onvif/__init__.py @@ -19,8 +19,9 @@ from homeassistant.const import ( HTTP_DIGEST_AUTHENTICATION, Platform, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady +from homeassistant.helpers import entity_registry as er from .const import ( CONF_ENABLE_WEBHOOKS, @@ -99,6 +100,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: if device.capabilities.imaging: device.platforms += [Platform.SWITCH] + _async_migrate_camera_entities_unique_ids(hass, entry, device) + await hass.config_entries.async_forward_entry_setups(entry, device.platforms) entry.async_on_unload( @@ -155,3 +158,58 @@ async def async_populate_options(hass: HomeAssistant, entry: ConfigEntry) -> Non } hass.config_entries.async_update_entry(entry, options=options) + + +@callback +def _async_migrate_camera_entities_unique_ids( + hass: HomeAssistant, config_entry: ConfigEntry, device: ONVIFDevice +) -> None: + """Migrate unique ids of camera entities from profile index to profile token.""" + entity_reg = er.async_get(hass) + entities: list[er.RegistryEntry] = er.async_entries_for_config_entry( + entity_reg, config_entry.entry_id + ) + + mac_or_serial = device.info.mac or device.info.serial_number + old_uid_start = f"{mac_or_serial}_" + new_uid_start = f"{mac_or_serial}#" + + for entity in entities: + if entity.domain != Platform.CAMERA: + continue + + if ( + not 
entity.unique_id.startswith(old_uid_start) + and entity.unique_id != mac_or_serial + ): + continue + + index = 0 + if entity.unique_id.startswith(old_uid_start): + try: + index = int(entity.unique_id[len(old_uid_start) :]) + except ValueError: + LOGGER.error( + "Failed to migrate unique id for '%s' as the ONVIF profile index could not be parsed from unique id '%s'", + entity.entity_id, + entity.unique_id, + ) + continue + try: + token = device.profiles[index].token + except IndexError: + LOGGER.error( + "Failed to migrate unique id for '%s' as the ONVIF profile index '%d' parsed from unique id '%s' could not be found", + entity.entity_id, + index, + entity.unique_id, + ) + continue + new_uid = f"{new_uid_start}{token}" + LOGGER.debug( + "Migrating unique id for '%s' from '%s' to '%s'", + entity.entity_id, + entity.unique_id, + new_uid, + ) + entity_reg.async_update_entity(entity.entity_id, new_unique_id=new_uid) diff --git a/homeassistant/components/onvif/camera.py b/homeassistant/components/onvif/camera.py index da99e170ff6..fc17e912fcc 100644 --- a/homeassistant/components/onvif/camera.py +++ b/homeassistant/components/onvif/camera.py @@ -117,10 +117,7 @@ class ONVIFCameraEntity(ONVIFBaseEntity, Camera): self._attr_entity_registry_enabled_default = ( device.max_resolution == profile.video.resolution.width ) - if profile.index: - self._attr_unique_id = f"{self.mac_or_serial}_{profile.index}" - else: - self._attr_unique_id = self.mac_or_serial + self._attr_unique_id = f"{self.mac_or_serial}#{profile.token}" self._attr_name = f"{device.name} {profile.name}" @property diff --git a/homeassistant/components/onvif/event.py b/homeassistant/components/onvif/event.py index b7b34f7be9f..d1b93304ccc 100644 --- a/homeassistant/components/onvif/event.py +++ b/homeassistant/components/onvif/event.py @@ -174,11 +174,20 @@ class EventManager: UNHANDLED_TOPICS.add(topic) continue - event = await parser(unique_id, msg) + try: + event = await parser(unique_id, msg) + error = None + except (AttributeError, KeyError) as e: + event = None + error = e if not event: LOGGER.warning( - "%s: Unable to parse event from %s: %s", self.name, unique_id, msg + "%s: Unable to parse event from %s: %s: %s", + self.name, + unique_id, + error, + msg, ) return diff --git a/homeassistant/components/onvif/parsers.py b/homeassistant/components/onvif/parsers.py index 6eb1d001796..e5a731c73f6 100644 --- a/homeassistant/components/onvif/parsers.py +++ b/homeassistant/components/onvif/parsers.py @@ -49,24 +49,22 @@ def local_datetime_or_none(value: str) -> datetime.datetime | None: @PARSERS.register("tns1:VideoSource/MotionAlarm") +@PARSERS.register("tns1:Device/Trigger/tnshik:AlarmIn") async def async_parse_motion_alarm(uid: str, msg) -> Event | None: """Handle parsing event message. 
Topic: tns1:VideoSource/MotionAlarm """ - try: - topic, payload = extract_message(msg) - source = payload.Source.SimpleItem[0].Value - return Event( - f"{uid}_{topic}_{source}", - "Motion Alarm", - "binary_sensor", - "motion", - None, - payload.Data.SimpleItem[0].Value == "true", - ) - except (AttributeError, KeyError): - return None + topic, payload = extract_message(msg) + source = payload.Source.SimpleItem[0].Value + return Event( + f"{uid}_{topic}_{source}", + "Motion Alarm", + "binary_sensor", + "motion", + None, + payload.Data.SimpleItem[0].Value == "true", + ) @PARSERS.register("tns1:VideoSource/ImageTooBlurry/AnalyticsService") @@ -77,20 +75,17 @@ async def async_parse_image_too_blurry(uid: str, msg) -> Event | None: Topic: tns1:VideoSource/ImageTooBlurry/* """ - try: - topic, payload = extract_message(msg) - source = payload.Source.SimpleItem[0].Value - return Event( - f"{uid}_{topic}_{source}", - "Image Too Blurry", - "binary_sensor", - "problem", - None, - payload.Data.SimpleItem[0].Value == "true", - EntityCategory.DIAGNOSTIC, - ) - except (AttributeError, KeyError): - return None + topic, payload = extract_message(msg) + source = payload.Source.SimpleItem[0].Value + return Event( + f"{uid}_{topic}_{source}", + "Image Too Blurry", + "binary_sensor", + "problem", + None, + payload.Data.SimpleItem[0].Value == "true", + EntityCategory.DIAGNOSTIC, + ) @PARSERS.register("tns1:VideoSource/ImageTooDark/AnalyticsService") @@ -101,20 +96,17 @@ async def async_parse_image_too_dark(uid: str, msg) -> Event | None: Topic: tns1:VideoSource/ImageTooDark/* """ - try: - topic, payload = extract_message(msg) - source = payload.Source.SimpleItem[0].Value - return Event( - f"{uid}_{topic}_{source}", - "Image Too Dark", - "binary_sensor", - "problem", - None, - payload.Data.SimpleItem[0].Value == "true", - EntityCategory.DIAGNOSTIC, - ) - except (AttributeError, KeyError): - return None + topic, payload = extract_message(msg) + source = payload.Source.SimpleItem[0].Value + return Event( + f"{uid}_{topic}_{source}", + "Image Too Dark", + "binary_sensor", + "problem", + None, + payload.Data.SimpleItem[0].Value == "true", + EntityCategory.DIAGNOSTIC, + ) @PARSERS.register("tns1:VideoSource/ImageTooBright/AnalyticsService") @@ -125,20 +117,17 @@ async def async_parse_image_too_bright(uid: str, msg) -> Event | None: Topic: tns1:VideoSource/ImageTooBright/* """ - try: - topic, payload = extract_message(msg) - source = payload.Source.SimpleItem[0].Value - return Event( - f"{uid}_{topic}_{source}", - "Image Too Bright", - "binary_sensor", - "problem", - None, - payload.Data.SimpleItem[0].Value == "true", - EntityCategory.DIAGNOSTIC, - ) - except (AttributeError, KeyError): - return None + topic, payload = extract_message(msg) + source = payload.Source.SimpleItem[0].Value + return Event( + f"{uid}_{topic}_{source}", + "Image Too Bright", + "binary_sensor", + "problem", + None, + payload.Data.SimpleItem[0].Value == "true", + EntityCategory.DIAGNOSTIC, + ) @PARSERS.register("tns1:VideoSource/GlobalSceneChange/AnalyticsService") @@ -149,19 +138,16 @@ async def async_parse_scene_change(uid: str, msg) -> Event | None: Topic: tns1:VideoSource/GlobalSceneChange/* """ - try: - topic, payload = extract_message(msg) - source = payload.Source.SimpleItem[0].Value - return Event( - f"{uid}_{topic}_{source}", - "Global Scene Change", - "binary_sensor", - "problem", - None, - payload.Data.SimpleItem[0].Value == "true", - ) - except (AttributeError, KeyError): - return None + topic, payload = extract_message(msg) + source 
= payload.Source.SimpleItem[0].Value + return Event( + f"{uid}_{topic}_{source}", + "Global Scene Change", + "binary_sensor", + "problem", + None, + payload.Data.SimpleItem[0].Value == "true", + ) @PARSERS.register("tns1:AudioAnalytics/Audio/DetectedSound") @@ -170,29 +156,26 @@ async def async_parse_detected_sound(uid: str, msg) -> Event | None: Topic: tns1:AudioAnalytics/Audio/DetectedSound """ - try: - audio_source = "" - audio_analytics = "" - rule = "" - topic, payload = extract_message(msg) - for source in payload.Source.SimpleItem: - if source.Name == "AudioSourceConfigurationToken": - audio_source = source.Value - if source.Name == "AudioAnalyticsConfigurationToken": - audio_analytics = source.Value - if source.Name == "Rule": - rule = source.Value + audio_source = "" + audio_analytics = "" + rule = "" + topic, payload = extract_message(msg) + for source in payload.Source.SimpleItem: + if source.Name == "AudioSourceConfigurationToken": + audio_source = source.Value + if source.Name == "AudioAnalyticsConfigurationToken": + audio_analytics = source.Value + if source.Name == "Rule": + rule = source.Value - return Event( - f"{uid}_{topic}_{audio_source}_{audio_analytics}_{rule}", - "Detected Sound", - "binary_sensor", - "sound", - None, - payload.Data.SimpleItem[0].Value == "true", - ) - except (AttributeError, KeyError): - return None + return Event( + f"{uid}_{topic}_{audio_source}_{audio_analytics}_{rule}", + "Detected Sound", + "binary_sensor", + "sound", + None, + payload.Data.SimpleItem[0].Value == "true", + ) @PARSERS.register("tns1:RuleEngine/FieldDetector/ObjectsInside") @@ -201,30 +184,26 @@ async def async_parse_field_detector(uid: str, msg) -> Event | None: Topic: tns1:RuleEngine/FieldDetector/ObjectsInside """ - try: - video_source = "" - video_analytics = "" - rule = "" - topic, payload = extract_message(msg) - for source in payload.Source.SimpleItem: - if source.Name == "VideoSourceConfigurationToken": - video_source = _normalize_video_source(source.Value) - if source.Name == "VideoAnalyticsConfigurationToken": - video_analytics = source.Value - if source.Name == "Rule": - rule = source.Value + video_source = "" + video_analytics = "" + rule = "" + topic, payload = extract_message(msg) + for source in payload.Source.SimpleItem: + if source.Name == "VideoSourceConfigurationToken": + video_source = _normalize_video_source(source.Value) + if source.Name == "VideoAnalyticsConfigurationToken": + video_analytics = source.Value + if source.Name == "Rule": + rule = source.Value - evt = Event( - f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}", - "Field Detection", - "binary_sensor", - "motion", - None, - payload.Data.SimpleItem[0].Value == "true", - ) - except (AttributeError, KeyError): - return None - return evt + return Event( + f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}", + "Field Detection", + "binary_sensor", + "motion", + None, + payload.Data.SimpleItem[0].Value == "true", + ) @PARSERS.register("tns1:RuleEngine/CellMotionDetector/Motion") @@ -233,29 +212,26 @@ async def async_parse_cell_motion_detector(uid: str, msg) -> Event | None: Topic: tns1:RuleEngine/CellMotionDetector/Motion """ - try: - video_source = "" - video_analytics = "" - rule = "" - topic, payload = extract_message(msg) - for source in payload.Source.SimpleItem: - if source.Name == "VideoSourceConfigurationToken": - video_source = _normalize_video_source(source.Value) - if source.Name == "VideoAnalyticsConfigurationToken": - video_analytics = source.Value - if source.Name == "Rule": - 
rule = source.Value + video_source = "" + video_analytics = "" + rule = "" + topic, payload = extract_message(msg) + for source in payload.Source.SimpleItem: + if source.Name == "VideoSourceConfigurationToken": + video_source = _normalize_video_source(source.Value) + if source.Name == "VideoAnalyticsConfigurationToken": + video_analytics = source.Value + if source.Name == "Rule": + rule = source.Value - return Event( - f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}", - "Cell Motion Detection", - "binary_sensor", - "motion", - None, - payload.Data.SimpleItem[0].Value == "true", - ) - except (AttributeError, KeyError): - return None + return Event( + f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}", + "Cell Motion Detection", + "binary_sensor", + "motion", + None, + payload.Data.SimpleItem[0].Value == "true", + ) @PARSERS.register("tns1:RuleEngine/MotionRegionDetector/Motion") @@ -264,29 +240,26 @@ async def async_parse_motion_region_detector(uid: str, msg) -> Event | None: Topic: tns1:RuleEngine/MotionRegionDetector/Motion """ - try: - video_source = "" - video_analytics = "" - rule = "" - topic, payload = extract_message(msg) - for source in payload.Source.SimpleItem: - if source.Name == "VideoSourceConfigurationToken": - video_source = _normalize_video_source(source.Value) - if source.Name == "VideoAnalyticsConfigurationToken": - video_analytics = source.Value - if source.Name == "Rule": - rule = source.Value + video_source = "" + video_analytics = "" + rule = "" + topic, payload = extract_message(msg) + for source in payload.Source.SimpleItem: + if source.Name == "VideoSourceConfigurationToken": + video_source = _normalize_video_source(source.Value) + if source.Name == "VideoAnalyticsConfigurationToken": + video_analytics = source.Value + if source.Name == "Rule": + rule = source.Value - return Event( - f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}", - "Motion Region Detection", - "binary_sensor", - "motion", - None, - payload.Data.SimpleItem[0].Value in ["1", "true"], - ) - except (AttributeError, KeyError): - return None + return Event( + f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}", + "Motion Region Detection", + "binary_sensor", + "motion", + None, + payload.Data.SimpleItem[0].Value in ["1", "true"], + ) @PARSERS.register("tns1:RuleEngine/TamperDetector/Tamper") @@ -295,30 +268,27 @@ async def async_parse_tamper_detector(uid: str, msg) -> Event | None: Topic: tns1:RuleEngine/TamperDetector/Tamper """ - try: - video_source = "" - video_analytics = "" - rule = "" - topic, payload = extract_message(msg) - for source in payload.Source.SimpleItem: - if source.Name == "VideoSourceConfigurationToken": - video_source = _normalize_video_source(source.Value) - if source.Name == "VideoAnalyticsConfigurationToken": - video_analytics = source.Value - if source.Name == "Rule": - rule = source.Value + video_source = "" + video_analytics = "" + rule = "" + topic, payload = extract_message(msg) + for source in payload.Source.SimpleItem: + if source.Name == "VideoSourceConfigurationToken": + video_source = _normalize_video_source(source.Value) + if source.Name == "VideoAnalyticsConfigurationToken": + video_analytics = source.Value + if source.Name == "Rule": + rule = source.Value - return Event( - f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}", - "Tamper Detection", - "binary_sensor", - "problem", - None, - payload.Data.SimpleItem[0].Value == "true", - EntityCategory.DIAGNOSTIC, - ) - except (AttributeError, KeyError): - return None + return Event( + 
f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}", + "Tamper Detection", + "binary_sensor", + "problem", + None, + payload.Data.SimpleItem[0].Value == "true", + EntityCategory.DIAGNOSTIC, + ) @PARSERS.register("tns1:RuleEngine/MyRuleDetector/DogCatDetect") @@ -327,23 +297,20 @@ async def async_parse_dog_cat_detector(uid: str, msg) -> Event | None: Topic: tns1:RuleEngine/MyRuleDetector/DogCatDetect """ - try: - video_source = "" - topic, payload = extract_message(msg) - for source in payload.Source.SimpleItem: - if source.Name == "Source": - video_source = _normalize_video_source(source.Value) + video_source = "" + topic, payload = extract_message(msg) + for source in payload.Source.SimpleItem: + if source.Name == "Source": + video_source = _normalize_video_source(source.Value) - return Event( - f"{uid}_{topic}_{video_source}", - "Pet Detection", - "binary_sensor", - "motion", - None, - payload.Data.SimpleItem[0].Value == "true", - ) - except (AttributeError, KeyError): - return None + return Event( + f"{uid}_{topic}_{video_source}", + "Pet Detection", + "binary_sensor", + "motion", + None, + payload.Data.SimpleItem[0].Value == "true", + ) @PARSERS.register("tns1:RuleEngine/MyRuleDetector/VehicleDetect") @@ -352,23 +319,20 @@ async def async_parse_vehicle_detector(uid: str, msg) -> Event | None: Topic: tns1:RuleEngine/MyRuleDetector/VehicleDetect """ - try: - video_source = "" - topic, payload = extract_message(msg) - for source in payload.Source.SimpleItem: - if source.Name == "Source": - video_source = _normalize_video_source(source.Value) + video_source = "" + topic, payload = extract_message(msg) + for source in payload.Source.SimpleItem: + if source.Name == "Source": + video_source = _normalize_video_source(source.Value) - return Event( - f"{uid}_{topic}_{video_source}", - "Vehicle Detection", - "binary_sensor", - "motion", - None, - payload.Data.SimpleItem[0].Value == "true", - ) - except (AttributeError, KeyError): - return None + return Event( + f"{uid}_{topic}_{video_source}", + "Vehicle Detection", + "binary_sensor", + "motion", + None, + payload.Data.SimpleItem[0].Value == "true", + ) _TAPO_EVENT_TEMPLATES: dict[str, Event] = { @@ -420,32 +384,28 @@ async def async_parse_tplink_detector(uid: str, msg) -> Event | None: Topic: tns1:RuleEngine/PeopleDetector/People Topic: tns1:RuleEngine/TPSmartEventDetector/TPSmartEvent """ - try: - video_source = "" - video_analytics = "" - rule = "" - topic, payload = extract_message(msg) - for source in payload.Source.SimpleItem: - if source.Name == "VideoSourceConfigurationToken": - video_source = _normalize_video_source(source.Value) - if source.Name == "VideoAnalyticsConfigurationToken": - video_analytics = source.Value - if source.Name == "Rule": - rule = source.Value + video_source = "" + video_analytics = "" + rule = "" + topic, payload = extract_message(msg) + for source in payload.Source.SimpleItem: + if source.Name == "VideoSourceConfigurationToken": + video_source = _normalize_video_source(source.Value) + if source.Name == "VideoAnalyticsConfigurationToken": + video_analytics = source.Value + if source.Name == "Rule": + rule = source.Value - for item in payload.Data.SimpleItem: - event_template = _TAPO_EVENT_TEMPLATES.get(item.Name, None) - if event_template is None: - continue + for item in payload.Data.SimpleItem: + event_template = _TAPO_EVENT_TEMPLATES.get(item.Name, None) + if event_template is None: + continue - return dataclasses.replace( - event_template, - uid=f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}", - 
value=item.Value == "true", - ) - - except (AttributeError, KeyError): - return None + return dataclasses.replace( + event_template, + uid=f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}", + value=item.Value == "true", + ) return None @@ -456,23 +416,20 @@ async def async_parse_person_detector(uid: str, msg) -> Event | None: Topic: tns1:RuleEngine/MyRuleDetector/PeopleDetect """ - try: - video_source = "" - topic, payload = extract_message(msg) - for source in payload.Source.SimpleItem: - if source.Name == "Source": - video_source = _normalize_video_source(source.Value) + video_source = "" + topic, payload = extract_message(msg) + for source in payload.Source.SimpleItem: + if source.Name == "Source": + video_source = _normalize_video_source(source.Value) - return Event( - f"{uid}_{topic}_{video_source}", - "Person Detection", - "binary_sensor", - "motion", - None, - payload.Data.SimpleItem[0].Value == "true", - ) - except (AttributeError, KeyError): - return None + return Event( + f"{uid}_{topic}_{video_source}", + "Person Detection", + "binary_sensor", + "motion", + None, + payload.Data.SimpleItem[0].Value == "true", + ) @PARSERS.register("tns1:RuleEngine/MyRuleDetector/FaceDetect") @@ -481,23 +438,20 @@ async def async_parse_face_detector(uid: str, msg) -> Event | None: Topic: tns1:RuleEngine/MyRuleDetector/FaceDetect """ - try: - video_source = "" - topic, payload = extract_message(msg) - for source in payload.Source.SimpleItem: - if source.Name == "Source": - video_source = _normalize_video_source(source.Value) + video_source = "" + topic, payload = extract_message(msg) + for source in payload.Source.SimpleItem: + if source.Name == "Source": + video_source = _normalize_video_source(source.Value) - return Event( - f"{uid}_{topic}_{video_source}", - "Face Detection", - "binary_sensor", - "motion", - None, - payload.Data.SimpleItem[0].Value == "true", - ) - except (AttributeError, KeyError): - return None + return Event( + f"{uid}_{topic}_{video_source}", + "Face Detection", + "binary_sensor", + "motion", + None, + payload.Data.SimpleItem[0].Value == "true", + ) @PARSERS.register("tns1:RuleEngine/MyRuleDetector/Visitor") @@ -506,23 +460,42 @@ async def async_parse_visitor_detector(uid: str, msg) -> Event | None: Topic: tns1:RuleEngine/MyRuleDetector/Visitor """ - try: - video_source = "" - topic, payload = extract_message(msg) - for source in payload.Source.SimpleItem: - if source.Name == "Source": - video_source = _normalize_video_source(source.Value) + video_source = "" + topic, payload = extract_message(msg) + for source in payload.Source.SimpleItem: + if source.Name == "Source": + video_source = _normalize_video_source(source.Value) - return Event( - f"{uid}_{topic}_{video_source}", - "Visitor Detection", - "binary_sensor", - "occupancy", - None, - payload.Data.SimpleItem[0].Value == "true", - ) - except (AttributeError, KeyError): - return None + return Event( + f"{uid}_{topic}_{video_source}", + "Visitor Detection", + "binary_sensor", + "occupancy", + None, + payload.Data.SimpleItem[0].Value == "true", + ) + + +@PARSERS.register("tns1:RuleEngine/MyRuleDetector/Package") +async def async_parse_package_detector(uid: str, msg) -> Event | None: + """Handle parsing event message. 
+ + Topic: tns1:RuleEngine/MyRuleDetector/Package + """ + video_source = "" + topic, payload = extract_message(msg) + for source in payload.Source.SimpleItem: + if source.Name == "Source": + video_source = _normalize_video_source(source.Value) + + return Event( + f"{uid}_{topic}_{video_source}", + "Package Detection", + "binary_sensor", + "occupancy", + None, + payload.Data.SimpleItem[0].Value == "true", + ) @PARSERS.register("tns1:Device/Trigger/DigitalInput") @@ -531,19 +504,16 @@ async def async_parse_digital_input(uid: str, msg) -> Event | None: Topic: tns1:Device/Trigger/DigitalInput """ - try: - topic, payload = extract_message(msg) - source = payload.Source.SimpleItem[0].Value - return Event( - f"{uid}_{topic}_{source}", - "Digital Input", - "binary_sensor", - None, - None, - payload.Data.SimpleItem[0].Value == "true", - ) - except (AttributeError, KeyError): - return None + topic, payload = extract_message(msg) + source = payload.Source.SimpleItem[0].Value + return Event( + f"{uid}_{topic}_{source}", + "Digital Input", + "binary_sensor", + None, + None, + payload.Data.SimpleItem[0].Value == "true", + ) @PARSERS.register("tns1:Device/Trigger/Relay") @@ -552,19 +522,16 @@ async def async_parse_relay(uid: str, msg) -> Event | None: Topic: tns1:Device/Trigger/Relay """ - try: - topic, payload = extract_message(msg) - source = payload.Source.SimpleItem[0].Value - return Event( - f"{uid}_{topic}_{source}", - "Relay Triggered", - "binary_sensor", - None, - None, - payload.Data.SimpleItem[0].Value == "active", - ) - except (AttributeError, KeyError): - return None + topic, payload = extract_message(msg) + source = payload.Source.SimpleItem[0].Value + return Event( + f"{uid}_{topic}_{source}", + "Relay Triggered", + "binary_sensor", + None, + None, + payload.Data.SimpleItem[0].Value == "active", + ) @PARSERS.register("tns1:Device/HardwareFailure/StorageFailure") @@ -573,20 +540,17 @@ async def async_parse_storage_failure(uid: str, msg) -> Event | None: Topic: tns1:Device/HardwareFailure/StorageFailure """ - try: - topic, payload = extract_message(msg) - source = payload.Source.SimpleItem[0].Value - return Event( - f"{uid}_{topic}_{source}", - "Storage Failure", - "binary_sensor", - "problem", - None, - payload.Data.SimpleItem[0].Value == "true", - EntityCategory.DIAGNOSTIC, - ) - except (AttributeError, KeyError): - return None + topic, payload = extract_message(msg) + source = payload.Source.SimpleItem[0].Value + return Event( + f"{uid}_{topic}_{source}", + "Storage Failure", + "binary_sensor", + "problem", + None, + payload.Data.SimpleItem[0].Value == "true", + EntityCategory.DIAGNOSTIC, + ) @PARSERS.register("tns1:Monitoring/ProcessorUsage") @@ -595,23 +559,20 @@ async def async_parse_processor_usage(uid: str, msg) -> Event | None: Topic: tns1:Monitoring/ProcessorUsage """ - try: - topic, payload = extract_message(msg) - usage = float(payload.Data.SimpleItem[0].Value) - if usage <= 1: - usage *= 100 + topic, payload = extract_message(msg) + usage = float(payload.Data.SimpleItem[0].Value) + if usage <= 1: + usage *= 100 - return Event( - f"{uid}_{topic}", - "Processor Usage", - "sensor", - None, - "percent", - int(usage), - EntityCategory.DIAGNOSTIC, - ) - except (AttributeError, KeyError): - return None + return Event( + f"{uid}_{topic}", + "Processor Usage", + "sensor", + None, + "percent", + int(usage), + EntityCategory.DIAGNOSTIC, + ) @PARSERS.register("tns1:Monitoring/OperatingTime/LastReboot") @@ -620,20 +581,17 @@ async def async_parse_last_reboot(uid: str, msg) -> Event | None: 
Topic: tns1:Monitoring/OperatingTime/LastReboot """ - try: - topic, payload = extract_message(msg) - date_time = local_datetime_or_none(payload.Data.SimpleItem[0].Value) - return Event( - f"{uid}_{topic}", - "Last Reboot", - "sensor", - "timestamp", - None, - date_time, - EntityCategory.DIAGNOSTIC, - ) - except (AttributeError, KeyError): - return None + topic, payload = extract_message(msg) + date_time = local_datetime_or_none(payload.Data.SimpleItem[0].Value) + return Event( + f"{uid}_{topic}", + "Last Reboot", + "sensor", + "timestamp", + None, + date_time, + EntityCategory.DIAGNOSTIC, + ) @PARSERS.register("tns1:Monitoring/OperatingTime/LastReset") @@ -642,21 +600,18 @@ async def async_parse_last_reset(uid: str, msg) -> Event | None: Topic: tns1:Monitoring/OperatingTime/LastReset """ - try: - topic, payload = extract_message(msg) - date_time = local_datetime_or_none(payload.Data.SimpleItem[0].Value) - return Event( - f"{uid}_{topic}", - "Last Reset", - "sensor", - "timestamp", - None, - date_time, - EntityCategory.DIAGNOSTIC, - entity_enabled=False, - ) - except (AttributeError, KeyError): - return None + topic, payload = extract_message(msg) + date_time = local_datetime_or_none(payload.Data.SimpleItem[0].Value) + return Event( + f"{uid}_{topic}", + "Last Reset", + "sensor", + "timestamp", + None, + date_time, + EntityCategory.DIAGNOSTIC, + entity_enabled=False, + ) @PARSERS.register("tns1:Monitoring/Backup/Last") @@ -665,22 +620,18 @@ async def async_parse_backup_last(uid: str, msg) -> Event | None: Topic: tns1:Monitoring/Backup/Last """ - - try: - topic, payload = extract_message(msg) - date_time = local_datetime_or_none(payload.Data.SimpleItem[0].Value) - return Event( - f"{uid}_{topic}", - "Last Backup", - "sensor", - "timestamp", - None, - date_time, - EntityCategory.DIAGNOSTIC, - entity_enabled=False, - ) - except (AttributeError, KeyError): - return None + topic, payload = extract_message(msg) + date_time = local_datetime_or_none(payload.Data.SimpleItem[0].Value) + return Event( + f"{uid}_{topic}", + "Last Backup", + "sensor", + "timestamp", + None, + date_time, + EntityCategory.DIAGNOSTIC, + entity_enabled=False, + ) @PARSERS.register("tns1:Monitoring/OperatingTime/LastClockSynchronization") @@ -689,21 +640,18 @@ async def async_parse_last_clock_sync(uid: str, msg) -> Event | None: Topic: tns1:Monitoring/OperatingTime/LastClockSynchronization """ - try: - topic, payload = extract_message(msg) - date_time = local_datetime_or_none(payload.Data.SimpleItem[0].Value) - return Event( - f"{uid}_{topic}", - "Last Clock Synchronization", - "sensor", - "timestamp", - None, - date_time, - EntityCategory.DIAGNOSTIC, - entity_enabled=False, - ) - except (AttributeError, KeyError): - return None + topic, payload = extract_message(msg) + date_time = local_datetime_or_none(payload.Data.SimpleItem[0].Value) + return Event( + f"{uid}_{topic}", + "Last Clock Synchronization", + "sensor", + "timestamp", + None, + date_time, + EntityCategory.DIAGNOSTIC, + entity_enabled=False, + ) @PARSERS.register("tns1:RecordingConfig/JobState") @@ -713,20 +661,17 @@ async def async_parse_jobstate(uid: str, msg) -> Event | None: Topic: tns1:RecordingConfig/JobState """ - try: - topic, payload = extract_message(msg) - source = payload.Source.SimpleItem[0].Value - return Event( - f"{uid}_{topic}_{source}", - "Recording Job State", - "binary_sensor", - None, - None, - payload.Data.SimpleItem[0].Value == "Active", - EntityCategory.DIAGNOSTIC, - ) - except (AttributeError, KeyError): - return None + topic, payload = 
extract_message(msg) + source = payload.Source.SimpleItem[0].Value + return Event( + f"{uid}_{topic}_{source}", + "Recording Job State", + "binary_sensor", + None, + None, + payload.Data.SimpleItem[0].Value == "Active", + EntityCategory.DIAGNOSTIC, + ) @PARSERS.register("tns1:RuleEngine/LineDetector/Crossed") @@ -735,30 +680,27 @@ async def async_parse_linedetector_crossed(uid: str, msg) -> Event | None: Topic: tns1:RuleEngine/LineDetector/Crossed """ - try: - video_source = "" - video_analytics = "" - rule = "" - topic, payload = extract_message(msg) - for source in payload.Source.SimpleItem: - if source.Name == "VideoSourceConfigurationToken": - video_source = source.Value - if source.Name == "VideoAnalyticsConfigurationToken": - video_analytics = source.Value - if source.Name == "Rule": - rule = source.Value + video_source = "" + video_analytics = "" + rule = "" + topic, payload = extract_message(msg) + for source in payload.Source.SimpleItem: + if source.Name == "VideoSourceConfigurationToken": + video_source = source.Value + if source.Name == "VideoAnalyticsConfigurationToken": + video_analytics = source.Value + if source.Name == "Rule": + rule = source.Value - return Event( - f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}", - "Line Detector Crossed", - "sensor", - None, - None, - payload.Data.SimpleItem[0].Value, - EntityCategory.DIAGNOSTIC, - ) - except (AttributeError, KeyError): - return None + return Event( + f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}", + "Line Detector Crossed", + "sensor", + None, + None, + payload.Data.SimpleItem[0].Value, + EntityCategory.DIAGNOSTIC, + ) @PARSERS.register("tns1:RuleEngine/CountAggregation/Counter") @@ -767,30 +709,27 @@ async def async_parse_count_aggregation_counter(uid: str, msg) -> Event | None: Topic: tns1:RuleEngine/CountAggregation/Counter """ - try: - video_source = "" - video_analytics = "" - rule = "" - topic, payload = extract_message(msg) - for source in payload.Source.SimpleItem: - if source.Name == "VideoSourceConfigurationToken": - video_source = _normalize_video_source(source.Value) - if source.Name == "VideoAnalyticsConfigurationToken": - video_analytics = source.Value - if source.Name == "Rule": - rule = source.Value + video_source = "" + video_analytics = "" + rule = "" + topic, payload = extract_message(msg) + for source in payload.Source.SimpleItem: + if source.Name == "VideoSourceConfigurationToken": + video_source = _normalize_video_source(source.Value) + if source.Name == "VideoAnalyticsConfigurationToken": + video_analytics = source.Value + if source.Name == "Rule": + rule = source.Value - return Event( - f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}", - "Count Aggregation Counter", - "sensor", - None, - None, - payload.Data.SimpleItem[0].Value, - EntityCategory.DIAGNOSTIC, - ) - except (AttributeError, KeyError): - return None + return Event( + f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}", + "Count Aggregation Counter", + "sensor", + None, + None, + payload.Data.SimpleItem[0].Value, + EntityCategory.DIAGNOSTIC, + ) @PARSERS.register("tns1:UserAlarm/IVA/HumanShapeDetect") @@ -799,21 +738,18 @@ async def async_parse_human_shape_detect(uid: str, msg) -> Event | None: Topic: tns1:UserAlarm/IVA/HumanShapeDetect """ - try: - topic, payload = extract_message(msg) - video_source = "" - for source in payload.Source.SimpleItem: - if source.Name == "VideoSourceConfigurationToken": - video_source = _normalize_video_source(source.Value) - break + topic, payload = extract_message(msg) + 
video_source = "" + for source in payload.Source.SimpleItem: + if source.Name == "VideoSourceConfigurationToken": + video_source = _normalize_video_source(source.Value) + break - return Event( - f"{uid}_{topic}_{video_source}", - "Human Shape Detect", - "binary_sensor", - "motion", - None, - payload.Data.SimpleItem[0].Value == "true", - ) - except (AttributeError, KeyError): - return None + return Event( + f"{uid}_{topic}_{video_source}", + "Human Shape Detect", + "binary_sensor", + "motion", + None, + payload.Data.SimpleItem[0].Value == "true", + ) diff --git a/homeassistant/components/onvif/strings.json b/homeassistant/components/onvif/strings.json index 0afb5e59e8e..7988c50b1ac 100644 --- a/homeassistant/components/onvif/strings.json +++ b/homeassistant/components/onvif/strings.json @@ -62,12 +62,12 @@ "step": { "onvif_devices": { "data": { - "extra_arguments": "Extra FFMPEG arguments", + "extra_arguments": "Extra FFmpeg arguments", "rtsp_transport": "RTSP transport mechanism", "use_wallclock_as_timestamps": "Use wall clock as timestamps", - "enable_webhooks": "Enable Webhooks" + "enable_webhooks": "Enable webhooks" }, - "title": "ONVIF Device Options" + "title": "ONVIF device options" } } }, diff --git a/homeassistant/components/openai_conversation/__init__.py b/homeassistant/components/openai_conversation/__init__.py index 0fbda9b7f4a..fcf6ab298dc 100644 --- a/homeassistant/components/openai_conversation/__init__.py +++ b/homeassistant/components/openai_conversation/__init__.py @@ -2,7 +2,20 @@ from __future__ import annotations +import base64 +from mimetypes import guess_file_type +from pathlib import Path + import openai +from openai.types.images_response import ImagesResponse +from openai.types.responses import ( + EasyInputMessageParam, + Response, + ResponseInputImageParam, + ResponseInputMessageContentListParam, + ResponseInputParam, + ResponseInputTextParam, +) import voluptuous as vol from homeassistant.config_entries import ConfigEntry @@ -22,15 +35,41 @@ from homeassistant.helpers import config_validation as cv, selector from homeassistant.helpers.httpx_client import get_async_client from homeassistant.helpers.typing import ConfigType -from .const import DOMAIN, LOGGER +from .const import ( + CONF_CHAT_MODEL, + CONF_FILENAMES, + CONF_MAX_TOKENS, + CONF_PROMPT, + CONF_REASONING_EFFORT, + CONF_TEMPERATURE, + CONF_TOP_P, + DOMAIN, + LOGGER, + RECOMMENDED_CHAT_MODEL, + RECOMMENDED_MAX_TOKENS, + RECOMMENDED_REASONING_EFFORT, + RECOMMENDED_TEMPERATURE, + RECOMMENDED_TOP_P, +) SERVICE_GENERATE_IMAGE = "generate_image" +SERVICE_GENERATE_CONTENT = "generate_content" + PLATFORMS = (Platform.CONVERSATION,) CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) type OpenAIConfigEntry = ConfigEntry[openai.AsyncClient] +def encode_file(file_path: str) -> tuple[str, str]: + """Return base64 version of file contents.""" + mime_type, _ = guess_file_type(file_path) + if mime_type is None: + mime_type = "application/octet-stream" + with open(file_path, "rb") as image_file: + return (mime_type, base64.b64encode(image_file.read()).decode("utf-8")) + + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up OpenAI Conversation.""" @@ -49,9 +88,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: client: openai.AsyncClient = entry.runtime_data try: - response = await client.images.generate( + response: ImagesResponse = await client.images.generate( model="dall-e-3", - prompt=call.data["prompt"], + prompt=call.data[CONF_PROMPT], 
size=call.data["size"], quality=call.data["quality"], style=call.data["style"], @@ -63,6 +102,108 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: return response.data[0].model_dump(exclude={"b64_json"}) + async def send_prompt(call: ServiceCall) -> ServiceResponse: + """Send a prompt to ChatGPT and return the response.""" + entry_id = call.data["config_entry"] + entry = hass.config_entries.async_get_entry(entry_id) + + if entry is None or entry.domain != DOMAIN: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_config_entry", + translation_placeholders={"config_entry": entry_id}, + ) + + model: str = entry.options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL) + client: openai.AsyncClient = entry.runtime_data + + content: ResponseInputMessageContentListParam = [ + ResponseInputTextParam(type="input_text", text=call.data[CONF_PROMPT]) + ] + + def append_files_to_content() -> None: + for filename in call.data[CONF_FILENAMES]: + if not hass.config.is_allowed_path(filename): + raise HomeAssistantError( + f"Cannot read `{filename}`, no access to path; " + "`allowlist_external_dirs` may need to be adjusted in " + "`configuration.yaml`" + ) + if not Path(filename).exists(): + raise HomeAssistantError(f"`{filename}` does not exist") + mime_type, base64_file = encode_file(filename) + if "image/" not in mime_type: + raise HomeAssistantError( + "Only images are supported by the OpenAI API," + f"`{filename}` is not an image file" + ) + content.append( + ResponseInputImageParam( + type="input_image", + file_id=filename, + image_url=f"data:{mime_type};base64,{base64_file}", + detail="auto", + ) + ) + + if CONF_FILENAMES in call.data: + await hass.async_add_executor_job(append_files_to_content) + + messages: ResponseInputParam = [ + EasyInputMessageParam(type="message", role="user", content=content) + ] + + try: + model_args = { + "model": model, + "input": messages, + "max_output_tokens": entry.options.get( + CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS + ), + "top_p": entry.options.get(CONF_TOP_P, RECOMMENDED_TOP_P), + "temperature": entry.options.get( + CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE + ), + "user": call.context.user_id, + "store": False, + } + + if model.startswith("o"): + model_args["reasoning"] = { + "effort": entry.options.get( + CONF_REASONING_EFFORT, RECOMMENDED_REASONING_EFFORT + ) + } + + response: Response = await client.responses.create(**model_args) + + except openai.OpenAIError as err: + raise HomeAssistantError(f"Error generating content: {err}") from err + except FileNotFoundError as err: + raise HomeAssistantError(f"Error generating content: {err}") from err + + return {"text": response.output_text} + + hass.services.async_register( + DOMAIN, + SERVICE_GENERATE_CONTENT, + send_prompt, + schema=vol.Schema( + { + vol.Required("config_entry"): selector.ConfigEntrySelector( + { + "integration": DOMAIN, + } + ), + vol.Required(CONF_PROMPT): cv.string, + vol.Optional(CONF_FILENAMES, default=[]): vol.All( + cv.ensure_list, [cv.string] + ), + } + ), + supports_response=SupportsResponse.ONLY, + ) + hass.services.async_register( DOMAIN, SERVICE_GENERATE_IMAGE, @@ -74,7 +215,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: "integration": DOMAIN, } ), - vol.Required("prompt"): cv.string, + vol.Required(CONF_PROMPT): cv.string, vol.Optional("size", default="1024x1024"): vol.In( ("1024x1024", "1024x1792", "1792x1024") ), @@ -84,6 +225,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> 
bool: ), supports_response=SupportsResponse.ONLY, ) + return True diff --git a/homeassistant/components/openai_conversation/config_flow.py b/homeassistant/components/openai_conversation/config_flow.py index c631884ea0b..7304eb52da3 100644 --- a/homeassistant/components/openai_conversation/config_flow.py +++ b/homeassistant/components/openai_conversation/config_flow.py @@ -2,22 +2,31 @@ from __future__ import annotations +import json import logging from types import MappingProxyType from typing import Any import openai import voluptuous as vol +from voluptuous_openapi import convert +from homeassistant.components.zone import ENTITY_ID_HOME from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, OptionsFlow, ) -from homeassistant.const import CONF_API_KEY, CONF_LLM_HASS_API +from homeassistant.const import ( + ATTR_LATITUDE, + ATTR_LONGITUDE, + CONF_API_KEY, + CONF_LLM_HASS_API, +) from homeassistant.core import HomeAssistant from homeassistant.helpers import llm +from homeassistant.helpers.httpx_client import get_async_client from homeassistant.helpers.selector import ( NumberSelector, NumberSelectorConfig, @@ -37,12 +46,22 @@ from .const import ( CONF_RECOMMENDED, CONF_TEMPERATURE, CONF_TOP_P, + CONF_WEB_SEARCH, + CONF_WEB_SEARCH_CITY, + CONF_WEB_SEARCH_CONTEXT_SIZE, + CONF_WEB_SEARCH_COUNTRY, + CONF_WEB_SEARCH_REGION, + CONF_WEB_SEARCH_TIMEZONE, + CONF_WEB_SEARCH_USER_LOCATION, DOMAIN, RECOMMENDED_CHAT_MODEL, RECOMMENDED_MAX_TOKENS, RECOMMENDED_REASONING_EFFORT, RECOMMENDED_TEMPERATURE, RECOMMENDED_TOP_P, + RECOMMENDED_WEB_SEARCH, + RECOMMENDED_WEB_SEARCH_CONTEXT_SIZE, + RECOMMENDED_WEB_SEARCH_USER_LOCATION, UNSUPPORTED_MODELS, ) @@ -66,7 +85,9 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None: Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user. """ - client = openai.AsyncOpenAI(api_key=data[CONF_API_KEY]) + client = openai.AsyncOpenAI( + api_key=data[CONF_API_KEY], http_client=get_async_client(hass) + ) await hass.async_add_executor_job(client.with_options(timeout=10.0).models.list) @@ -137,7 +158,16 @@ class OpenAIOptionsFlow(OptionsFlow): if user_input.get(CONF_CHAT_MODEL) in UNSUPPORTED_MODELS: errors[CONF_CHAT_MODEL] = "model_not_supported" - else: + + if user_input.get(CONF_WEB_SEARCH): + if not user_input.get( + CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL + ).startswith("gpt-4o"): + errors[CONF_WEB_SEARCH] = "web_search_not_supported" + elif user_input.get(CONF_WEB_SEARCH_USER_LOCATION): + user_input.update(await self.get_location_data()) + + if not errors: return self.async_create_entry(title="", data=user_input) else: # Re-render the options again, now with the recommended options shown/hidden @@ -156,6 +186,59 @@ class OpenAIOptionsFlow(OptionsFlow): errors=errors, ) + async def get_location_data(self) -> dict[str, str]: + """Get approximate location data of the user.""" + location_data: dict[str, str] = {} + zone_home = self.hass.states.get(ENTITY_ID_HOME) + if zone_home is not None: + client = openai.AsyncOpenAI( + api_key=self.config_entry.data[CONF_API_KEY], + http_client=get_async_client(self.hass), + ) + location_schema = vol.Schema( + { + vol.Optional( + CONF_WEB_SEARCH_CITY, + description="Free text input for the city, e.g. `San Francisco`", + ): str, + vol.Optional( + CONF_WEB_SEARCH_REGION, + description="Free text input for the region, e.g. 
`California`", ): str, + } ) + response = await client.responses.create( + model=RECOMMENDED_CHAT_MODEL, + input=[ + { + "role": "system", + "content": "Where are the following coordinates located: " + f"({zone_home.attributes[ATTR_LATITUDE]}," + f" {zone_home.attributes[ATTR_LONGITUDE]})?", + } + ], + text={ + "format": { + "type": "json_schema", + "name": "approximate_location", + "description": "Approximate location data of the user " + "for refined web search results", + "schema": convert(location_schema), + "strict": False, + } + }, + store=False, + ) + location_data = location_schema(json.loads(response.output_text) or {}) + + if self.hass.config.country: + location_data[CONF_WEB_SEARCH_COUNTRY] = self.hass.config.country + location_data[CONF_WEB_SEARCH_TIMEZONE] = self.hass.config.time_zone + + _LOGGER.debug("Location data: %s", location_data) + + return location_data + def openai_config_option_schema( hass: HomeAssistant, @@ -227,10 +310,35 @@ def openai_config_option_schema( ): SelectSelector( SelectSelectorConfig( options=["low", "medium", "high"], - translation_key="reasoning_effort", + translation_key=CONF_REASONING_EFFORT, mode=SelectSelectorMode.DROPDOWN, ) ), + vol.Optional( + CONF_WEB_SEARCH, + description={"suggested_value": options.get(CONF_WEB_SEARCH)}, + default=RECOMMENDED_WEB_SEARCH, + ): bool, + vol.Optional( + CONF_WEB_SEARCH_CONTEXT_SIZE, + description={ + "suggested_value": options.get(CONF_WEB_SEARCH_CONTEXT_SIZE) + }, + default=RECOMMENDED_WEB_SEARCH_CONTEXT_SIZE, + ): SelectSelector( + SelectSelectorConfig( + options=["low", "medium", "high"], + translation_key=CONF_WEB_SEARCH_CONTEXT_SIZE, + mode=SelectSelectorMode.DROPDOWN, + ) + ), + vol.Optional( + CONF_WEB_SEARCH_USER_LOCATION, + description={ + "suggested_value": options.get(CONF_WEB_SEARCH_USER_LOCATION) + }, + default=RECOMMENDED_WEB_SEARCH_USER_LOCATION, + ): bool, } ) return schema diff --git a/homeassistant/components/openai_conversation/const.py b/homeassistant/components/openai_conversation/const.py index 793e021e332..41abc504219 100644 --- a/homeassistant/components/openai_conversation/const.py +++ b/homeassistant/components/openai_conversation/const.py @@ -3,22 +3,34 @@ import logging DOMAIN = "openai_conversation" -LOGGER = logging.getLogger(__package__) +LOGGER: logging.Logger = logging.getLogger(__package__) -CONF_RECOMMENDED = "recommended" -CONF_PROMPT = "prompt" CONF_CHAT_MODEL = "chat_model" -RECOMMENDED_CHAT_MODEL = "gpt-4o-mini" +CONF_FILENAMES = "filenames" CONF_MAX_TOKENS = "max_tokens" -RECOMMENDED_MAX_TOKENS = 150 -CONF_TOP_P = "top_p" -RECOMMENDED_TOP_P = 1.0 -CONF_TEMPERATURE = "temperature" -RECOMMENDED_TEMPERATURE = 1.0 +CONF_PROMPT = "prompt" CONF_REASONING_EFFORT = "reasoning_effort" +CONF_RECOMMENDED = "recommended" +CONF_TEMPERATURE = "temperature" +CONF_TOP_P = "top_p" +CONF_WEB_SEARCH = "web_search" +CONF_WEB_SEARCH_USER_LOCATION = "user_location" +CONF_WEB_SEARCH_CONTEXT_SIZE = "search_context_size" +CONF_WEB_SEARCH_CITY = "city" +CONF_WEB_SEARCH_REGION = "region" +CONF_WEB_SEARCH_COUNTRY = "country" +CONF_WEB_SEARCH_TIMEZONE = "timezone" +RECOMMENDED_CHAT_MODEL = "gpt-4o-mini" +RECOMMENDED_MAX_TOKENS = 150 RECOMMENDED_REASONING_EFFORT = "low" +RECOMMENDED_TEMPERATURE = 1.0 +RECOMMENDED_TOP_P = 1.0 +RECOMMENDED_WEB_SEARCH = False +RECOMMENDED_WEB_SEARCH_CONTEXT_SIZE = "medium" +RECOMMENDED_WEB_SEARCH_USER_LOCATION = False -UNSUPPORTED_MODELS = [ +UNSUPPORTED_MODELS: list[str] = [ "o1-mini", "o1-mini-2024-09-12", "o1-preview", diff --git
a/homeassistant/components/openai_conversation/conversation.py b/homeassistant/components/openai_conversation/conversation.py index 37be41947f7..026e18f3ce1 100644 --- a/homeassistant/components/openai_conversation/conversation.py +++ b/homeassistant/components/openai_conversation/conversation.py @@ -2,21 +2,31 @@ from collections.abc import AsyncGenerator, Callable import json -from typing import Any, Literal, cast +from typing import Any, Literal import openai from openai._streaming import AsyncStream -from openai._types import NOT_GIVEN -from openai.types.chat import ( - ChatCompletionAssistantMessageParam, - ChatCompletionChunk, - ChatCompletionMessageParam, - ChatCompletionMessageToolCallParam, - ChatCompletionToolMessageParam, - ChatCompletionToolParam, +from openai.types.responses import ( + EasyInputMessageParam, + FunctionToolParam, + ResponseCompletedEvent, + ResponseErrorEvent, + ResponseFailedEvent, + ResponseFunctionCallArgumentsDeltaEvent, + ResponseFunctionCallArgumentsDoneEvent, + ResponseFunctionToolCall, + ResponseFunctionToolCallParam, + ResponseIncompleteEvent, + ResponseInputParam, + ResponseOutputItemAddedEvent, + ResponseOutputMessage, + ResponseStreamEvent, + ResponseTextDeltaEvent, + ToolParam, + WebSearchToolParam, ) -from openai.types.chat.chat_completion_message_tool_call_param import Function -from openai.types.shared_params import FunctionDefinition +from openai.types.responses.response_input_param import FunctionCallOutput +from openai.types.responses.web_search_tool_param import UserLocation from voluptuous_openapi import convert from homeassistant.components import assist_pipeline, conversation @@ -24,7 +34,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import chat_session, device_registry as dr, intent, llm +from homeassistant.helpers import device_registry as dr, intent, llm from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from . 
import OpenAIConfigEntry @@ -35,6 +45,13 @@ from .const import ( CONF_REASONING_EFFORT, CONF_TEMPERATURE, CONF_TOP_P, + CONF_WEB_SEARCH, + CONF_WEB_SEARCH_CITY, + CONF_WEB_SEARCH_CONTEXT_SIZE, + CONF_WEB_SEARCH_COUNTRY, + CONF_WEB_SEARCH_REGION, + CONF_WEB_SEARCH_TIMEZONE, + CONF_WEB_SEARCH_USER_LOCATION, DOMAIN, LOGGER, RECOMMENDED_CHAT_MODEL, @@ -42,6 +59,7 @@ from .const import ( RECOMMENDED_REASONING_EFFORT, RECOMMENDED_TEMPERATURE, RECOMMENDED_TOP_P, + RECOMMENDED_WEB_SEARCH_CONTEXT_SIZE, ) # Max number of back and forth with the LLM to generate a response @@ -60,122 +78,131 @@ async def async_setup_entry( def _format_tool( tool: llm.Tool, custom_serializer: Callable[[Any], Any] | None -) -> ChatCompletionToolParam: +) -> FunctionToolParam: """Format tool specification.""" - tool_spec = FunctionDefinition( + return FunctionToolParam( + type="function", name=tool.name, parameters=convert(tool.parameters, custom_serializer=custom_serializer), + description=tool.description, + strict=False, ) - if tool.description: - tool_spec["description"] = tool.description - return ChatCompletionToolParam(type="function", function=tool_spec) def _convert_content_to_param( content: conversation.Content, -) -> ChatCompletionMessageParam: +) -> ResponseInputParam: """Convert any native chat message for this agent to the native format.""" - if content.role == "tool_result": - assert type(content) is conversation.ToolResultContent - return ChatCompletionToolMessageParam( - role="tool", - tool_call_id=content.tool_call_id, - content=json.dumps(content.tool_result), - ) - if content.role != "assistant" or not content.tool_calls: # type: ignore[union-attr] - role = content.role + messages: ResponseInputParam = [] + if isinstance(content, conversation.ToolResultContent): + return [ + FunctionCallOutput( + type="function_call_output", + call_id=content.tool_call_id, + output=json.dumps(content.tool_result), + ) + ] + + if content.content: + role: Literal["user", "assistant", "system", "developer"] = content.role if role == "system": role = "developer" - return cast( - ChatCompletionMessageParam, - {"role": content.role, "content": content.content}, # type: ignore[union-attr] + messages.append( + EasyInputMessageParam(type="message", role=role, content=content.content) ) - # Handle the Assistant content including tool calls. 
- assert type(content) is conversation.AssistantContent - return ChatCompletionAssistantMessageParam( - role="assistant", - content=content.content, - tool_calls=[ - ChatCompletionMessageToolCallParam( - id=tool_call.id, - function=Function( - arguments=json.dumps(tool_call.tool_args), - name=tool_call.tool_name, - ), - type="function", + if isinstance(content, conversation.AssistantContent) and content.tool_calls: + messages.extend( + ResponseFunctionToolCallParam( + type="function_call", + name=tool_call.tool_name, + arguments=json.dumps(tool_call.tool_args), + call_id=tool_call.id, ) for tool_call in content.tool_calls - ], - ) + ) + return messages async def _transform_stream( - result: AsyncStream[ChatCompletionChunk], + chat_log: conversation.ChatLog, + result: AsyncStream[ResponseStreamEvent], ) -> AsyncGenerator[conversation.AssistantContentDeltaDict]: """Transform an OpenAI delta stream into HA format.""" - current_tool_call: dict | None = None + async for event in result: + LOGGER.debug("Received event: %s", event) - async for chunk in result: - LOGGER.debug("Received chunk: %s", chunk) - choice = chunk.choices[0] - - if choice.finish_reason: - if current_tool_call: - yield { - "tool_calls": [ - llm.ToolInput( - id=current_tool_call["id"], - tool_name=current_tool_call["tool_name"], - tool_args=json.loads(current_tool_call["tool_args"]), - ) - ] - } - - break - - delta = chunk.choices[0].delta - - # We can yield delta messages not continuing or starting tool calls - if current_tool_call is None and not delta.tool_calls: - yield { # type: ignore[misc] - key: value - for key in ("role", "content") - if (value := getattr(delta, key)) is not None - } - continue - - # When doing tool calls, we should always have a tool call - # object or we have gotten stopped above with a finish_reason set. 
- if ( - not delta.tool_calls - or not (delta_tool_call := delta.tool_calls[0]) - or not delta_tool_call.function - ): - raise ValueError("Expected delta with tool call") - - if current_tool_call and delta_tool_call.index == current_tool_call["index"]: - current_tool_call["tool_args"] += delta_tool_call.function.arguments or "" - continue - - # We got tool call with new index, so we need to yield the previous - if current_tool_call: + if isinstance(event, ResponseOutputItemAddedEvent): + if isinstance(event.item, ResponseOutputMessage): + yield {"role": event.item.role} + elif isinstance(event.item, ResponseFunctionToolCall): + current_tool_call = event.item + elif isinstance(event, ResponseTextDeltaEvent): + yield {"content": event.delta} + elif isinstance(event, ResponseFunctionCallArgumentsDeltaEvent): + current_tool_call.arguments += event.delta + elif isinstance(event, ResponseFunctionCallArgumentsDoneEvent): + current_tool_call.status = "completed" yield { "tool_calls": [ llm.ToolInput( - id=current_tool_call["id"], - tool_name=current_tool_call["tool_name"], - tool_args=json.loads(current_tool_call["tool_args"]), + id=current_tool_call.call_id, + tool_name=current_tool_call.name, + tool_args=json.loads(current_tool_call.arguments), ) ] } + elif isinstance(event, ResponseCompletedEvent): + if event.response.usage is not None: + chat_log.async_trace( + { + "stats": { + "input_tokens": event.response.usage.input_tokens, + "output_tokens": event.response.usage.output_tokens, + } + } + ) + elif isinstance(event, ResponseIncompleteEvent): + if event.response.usage is not None: + chat_log.async_trace( + { + "stats": { + "input_tokens": event.response.usage.input_tokens, + "output_tokens": event.response.usage.output_tokens, + } + } + ) - current_tool_call = { - "index": delta_tool_call.index, - "id": delta_tool_call.id, - "tool_name": delta_tool_call.function.name, - "tool_args": delta_tool_call.function.arguments or "", - } + if ( + event.response.incomplete_details + and event.response.incomplete_details.reason + ): + reason: str = event.response.incomplete_details.reason + else: + reason = "unknown reason" + + if reason == "max_output_tokens": + reason = "max output tokens reached" + elif reason == "content_filter": + reason = "content filter triggered" + + raise HomeAssistantError(f"OpenAI response incomplete: {reason}") + elif isinstance(event, ResponseFailedEvent): + if event.response.usage is not None: + chat_log.async_trace( + { + "stats": { + "input_tokens": event.response.usage.input_tokens, + "output_tokens": event.response.usage.output_tokens, + } + } + ) + reason = "unknown reason" + if event.response.error is not None: + reason = event.response.error.message + raise HomeAssistantError(f"OpenAI response failed: {reason}") + elif isinstance(event, ResponseErrorEvent): + raise HomeAssistantError(f"OpenAI response error: {event.message}") class OpenAIConversationEntity( @@ -223,18 +250,6 @@ class OpenAIConversationEntity( conversation.async_unset_agent(self.hass, self.entry) await super().async_will_remove_from_hass() - async def async_process( - self, user_input: conversation.ConversationInput - ) -> conversation.ConversationResult: - """Process a sentence.""" - with ( - chat_session.async_get_chat_session( - self.hass, user_input.conversation_id - ) as session, - conversation.async_get_chat_log(self.hass, session, user_input) as chat_log, - ): - return await self._async_handle_message(user_input, chat_log) - async def _async_handle_message( self, user_input: 
conversation.ConversationInput, @@ -253,15 +268,38 @@ class OpenAIConversationEntity( except conversation.ConverseError as err: return err.as_conversation_result() - tools: list[ChatCompletionToolParam] | None = None + tools: list[ToolParam] | None = None if chat_log.llm_api: tools = [ _format_tool(tool, chat_log.llm_api.custom_serializer) for tool in chat_log.llm_api.tools ] + if options.get(CONF_WEB_SEARCH): + web_search = WebSearchToolParam( + type="web_search_preview", + search_context_size=options.get( + CONF_WEB_SEARCH_CONTEXT_SIZE, RECOMMENDED_WEB_SEARCH_CONTEXT_SIZE + ), + ) + if options.get(CONF_WEB_SEARCH_USER_LOCATION): + web_search["user_location"] = UserLocation( + type="approximate", + city=options.get(CONF_WEB_SEARCH_CITY, ""), + region=options.get(CONF_WEB_SEARCH_REGION, ""), + country=options.get(CONF_WEB_SEARCH_COUNTRY, ""), + timezone=options.get(CONF_WEB_SEARCH_TIMEZONE, ""), + ) + if tools is None: + tools = [] + tools.append(web_search) + model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL) - messages = [_convert_content_to_param(content) for content in chat_log.content] + messages = [ + m + for content in chat_log.content + for m in _convert_content_to_param(content) + ] client = self.entry.runtime_data @@ -269,24 +307,28 @@ class OpenAIConversationEntity( for _iteration in range(MAX_TOOL_ITERATIONS): model_args = { "model": model, - "messages": messages, - "tools": tools or NOT_GIVEN, - "max_completion_tokens": options.get( + "input": messages, + "max_output_tokens": options.get( CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS ), "top_p": options.get(CONF_TOP_P, RECOMMENDED_TOP_P), "temperature": options.get(CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE), "user": chat_log.conversation_id, + "store": False, "stream": True, } + if tools: + model_args["tools"] = tools if model.startswith("o"): - model_args["reasoning_effort"] = options.get( - CONF_REASONING_EFFORT, RECOMMENDED_REASONING_EFFORT - ) + model_args["reasoning"] = { + "effort": options.get( + CONF_REASONING_EFFORT, RECOMMENDED_REASONING_EFFORT + ) + } try: - result = await client.chat.completions.create(**model_args) + result = await client.responses.create(**model_args) except openai.RateLimitError as err: LOGGER.error("Rate limited by OpenAI: %s", err) raise HomeAssistantError("Rate limited or insufficient funds") from err @@ -294,14 +336,10 @@ class OpenAIConversationEntity( LOGGER.error("Error talking to OpenAI: %s", err) raise HomeAssistantError("Error talking to OpenAI") from err - messages.extend( - [ - _convert_content_to_param(content) - async for content in chat_log.async_add_delta_content_stream( - user_input.agent_id, _transform_stream(result) - ) - ] - ) + async for content in chat_log.async_add_delta_content_stream( + user_input.agent_id, _transform_stream(chat_log, result) + ): + messages.extend(_convert_content_to_param(content)) if not chat_log.unresponded_tool_results: break diff --git a/homeassistant/components/openai_conversation/icons.json b/homeassistant/components/openai_conversation/icons.json index 3abecd640d1..f0ece31c304 100644 --- a/homeassistant/components/openai_conversation/icons.json +++ b/homeassistant/components/openai_conversation/icons.json @@ -2,6 +2,9 @@ "services": { "generate_image": { "service": "mdi:image-sync" + }, + "generate_content": { + "service": "mdi:receipt-text" } } } diff --git a/homeassistant/components/openai_conversation/manifest.json b/homeassistant/components/openai_conversation/manifest.json index a7aa7884dc4..988dd2321d5 100644 --- 
a/homeassistant/components/openai_conversation/manifest.json +++ b/homeassistant/components/openai_conversation/manifest.json @@ -8,5 +8,5 @@ "documentation": "https://www.home-assistant.io/integrations/openai_conversation", "integration_type": "service", "iot_class": "cloud_polling", - "requirements": ["openai==1.61.0"] + "requirements": ["openai==1.68.2"] } diff --git a/homeassistant/components/openai_conversation/services.yaml b/homeassistant/components/openai_conversation/services.yaml index 3db71cae383..75fa097f25d 100644 --- a/homeassistant/components/openai_conversation/services.yaml +++ b/homeassistant/components/openai_conversation/services.yaml @@ -38,3 +38,23 @@ generate_image: options: - "vivid" - "natural" +generate_content: + fields: + config_entry: + required: true + selector: + config_entry: + integration: openai_conversation + prompt: + required: true + selector: + text: + multiline: true + example: "Hello, how can I help you?" + filenames: + selector: + text: + multiline: true + example: | + - /path/to/file1.txt + - /path/to/file2.txt diff --git a/homeassistant/components/openai_conversation/strings.json b/homeassistant/components/openai_conversation/strings.json index aba4fdc3d40..a373ec448d7 100644 --- a/homeassistant/components/openai_conversation/strings.json +++ b/homeassistant/components/openai_conversation/strings.json @@ -24,16 +24,23 @@ "top_p": "Top P", "llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]", "recommended": "Recommended model settings", - "reasoning_effort": "Reasoning effort" + "reasoning_effort": "Reasoning effort", + "web_search": "Enable web search", + "search_context_size": "Search context size", + "user_location": "Include home location" }, "data_description": { "prompt": "Instruct how the LLM should respond. 
This can be a template.", - "reasoning_effort": "How many reasoning tokens the model should generate before creating a response to the prompt (for certain reasoning models)" + "reasoning_effort": "How many reasoning tokens the model should generate before creating a response to the prompt (for certain reasoning models)", + "web_search": "Allow the model to search the web for the latest information before generating a response", + "search_context_size": "High level guidance for the amount of context window space to use for the search", + "user_location": "Refine search results based on geography" } } }, "error": { - "model_not_supported": "This model is not supported, please select a different model" + "model_not_supported": "This model is not supported, please select a different model", + "web_search_not_supported": "Web search is only supported for gpt-4o and gpt-4o-mini models" } }, "selector": { @@ -43,6 +50,13 @@ "medium": "Medium", "high": "High" } + }, + "search_context_size": { + "options": { + "low": "Low", + "medium": "Medium", + "high": "High" + } } }, "services": { @@ -72,6 +86,24 @@ "description": "The style of the generated image" } } + }, + "generate_content": { + "name": "Generate content", + "description": "Sends a conversational query to ChatGPT including any attached image files", + "fields": { + "config_entry": { + "name": "Config entry", + "description": "The config entry to use for this action" + }, + "prompt": { + "name": "Prompt", + "description": "The prompt to send" + }, + "filenames": { + "name": "Files", + "description": "List of files to upload" + } + } } }, "exceptions": { diff --git a/homeassistant/components/openweathermap/sensor.py b/homeassistant/components/openweathermap/sensor.py index 0afab69b638..a595652d90b 100644 --- a/homeassistant/components/openweathermap/sensor.py +++ b/homeassistant/components/openweathermap/sensor.py @@ -89,7 +89,8 @@ WEATHER_SENSOR_TYPES: tuple[SensorEntityDescription, ...] 
= ( key=ATTR_API_WIND_BEARING, name="Wind bearing", native_unit_of_measurement=DEGREE, - state_class=SensorStateClass.MEASUREMENT, + state_class=SensorStateClass.MEASUREMENT_ANGLE, + device_class=SensorDeviceClass.WIND_DIRECTION, ), SensorEntityDescription( key=ATTR_API_HUMIDITY, diff --git a/homeassistant/components/opower/coordinator.py b/homeassistant/components/opower/coordinator.py index aed89ccf46e..e8b6dbf9718 100644 --- a/homeassistant/components/opower/coordinator.py +++ b/homeassistant/components/opower/coordinator.py @@ -16,7 +16,11 @@ from opower import ( from opower.exceptions import ApiException, CannotConnect, InvalidAuth from homeassistant.components.recorder import get_instance -from homeassistant.components.recorder.models import StatisticData, StatisticMetaData +from homeassistant.components.recorder.models import ( + StatisticData, + StatisticMeanType, + StatisticMetaData, +) from homeassistant.components.recorder.statistics import ( async_add_external_statistics, get_last_statistics, @@ -201,7 +205,7 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]): f"{account.meter_type.name.lower()} {account.utility_account_id}" ) cost_metadata = StatisticMetaData( - has_mean=False, + mean_type=StatisticMeanType.NONE, has_sum=True, name=f"{name_prefix} cost", source=DOMAIN, @@ -209,7 +213,7 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]): unit_of_measurement=None, ) consumption_metadata = StatisticMetaData( - has_mean=False, + mean_type=StatisticMeanType.NONE, has_sum=True, name=f"{name_prefix} consumption", source=DOMAIN, diff --git a/homeassistant/components/opower/strings.json b/homeassistant/components/opower/strings.json index 362e6cd7596..749545743fe 100644 --- a/homeassistant/components/opower/strings.json +++ b/homeassistant/components/opower/strings.json @@ -11,7 +11,7 @@ "mfa": { "description": "The TOTP secret below is not one of the 6 digit time-based numeric codes. It is a string of around 16 characters containing the shared secret that enables your authenticator app to generate the correct time-based code at the appropriate time. 
See the documentation.", "data": { - "totp_secret": "TOTP Secret" + "totp_secret": "TOTP secret" } }, "reauth_confirm": { @@ -19,7 +19,7 @@ "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", - "totp_secret": "TOTP Secret" + "totp_secret": "[%key:component::opower::config::step::mfa::data::totp_secret%]" } } }, diff --git a/homeassistant/components/overkiz/manifest.json b/homeassistant/components/overkiz/manifest.json index 07ec02d76a6..937b4ccb937 100644 --- a/homeassistant/components/overkiz/manifest.json +++ b/homeassistant/components/overkiz/manifest.json @@ -13,7 +13,7 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["boto3", "botocore", "pyhumps", "pyoverkiz", "s3transfer"], - "requirements": ["pyoverkiz==1.16.2"], + "requirements": ["pyoverkiz==1.16.5"], "zeroconf": [ { "type": "_kizbox._tcp.local.", diff --git a/homeassistant/components/overkiz/sensor.py b/homeassistant/components/overkiz/sensor.py index 9214398a37b..cec0d0d2571 100644 --- a/homeassistant/components/overkiz/sensor.py +++ b/homeassistant/components/overkiz/sensor.py @@ -70,6 +70,15 @@ SENSOR_DESCRIPTIONS: list[OverkizSensorDescription] = [ options=["full", "normal", "medium", "low", "verylow"], translation_key="battery", ), + OverkizSensorDescription( + key=OverkizState.CORE_BATTERY_DISCRETE_LEVEL, + name="Battery", + entity_category=EntityCategory.DIAGNOSTIC, + icon="mdi:battery", + device_class=SensorDeviceClass.ENUM, + options=["good", "medium", "low", "critical"], + translation_key="battery", + ), OverkizSensorDescription( key=OverkizState.CORE_RSSI_LEVEL, name="RSSI level", diff --git a/homeassistant/components/overkiz/strings.json b/homeassistant/components/overkiz/strings.json index 0c564a003d6..05b5eac4b21 100644 --- a/homeassistant/components/overkiz/strings.json +++ b/homeassistant/components/overkiz/strings.json @@ -123,7 +123,9 @@ "low": "Low", "normal": "Normal", "medium": "Medium", - "verylow": "Very low" + "verylow": "Very low", + "good": "Good", + "critical": "Critical" } }, "discrete_rssi_level": { diff --git a/homeassistant/components/overkiz/water_heater/__init__.py b/homeassistant/components/overkiz/water_heater/__init__.py index 9895ea84c2c..2960cefe10c 100644 --- a/homeassistant/components/overkiz/water_heater/__init__.py +++ b/homeassistant/components/overkiz/water_heater/__init__.py @@ -13,6 +13,9 @@ from ..entity import OverkizEntity from .atlantic_domestic_hot_water_production_mlb_component import ( AtlanticDomesticHotWaterProductionMBLComponent, ) +from .atlantic_domestic_hot_water_production_v2_io_component import ( + AtlanticDomesticHotWaterProductionV2IOComponent, +) from .atlantic_pass_apc_dhw import AtlanticPassAPCDHW from .domestic_hot_water_production import DomesticHotWaterProduction from .hitachi_dhw import HitachiDHW @@ -52,4 +55,5 @@ WIDGET_TO_WATER_HEATER_ENTITY = { CONTROLLABLE_NAME_TO_WATER_HEATER_ENTITY = { "modbuslink:AtlanticDomesticHotWaterProductionMBLComponent": AtlanticDomesticHotWaterProductionMBLComponent, + "io:AtlanticDomesticHotWaterProductionV2_CV4E_IOComponent": AtlanticDomesticHotWaterProductionV2IOComponent, } diff --git a/homeassistant/components/overkiz/water_heater/atlantic_domestic_hot_water_production_v2_io_component.py b/homeassistant/components/overkiz/water_heater/atlantic_domestic_hot_water_production_v2_io_component.py new file mode 100644 index 00000000000..7e7db07f847 --- /dev/null +++ 
b/homeassistant/components/overkiz/water_heater/atlantic_domestic_hot_water_production_v2_io_component.py @@ -0,0 +1,332 @@ +"""Support for AtlanticDomesticHotWaterProductionV2IOComponent.""" + +from typing import Any, cast + +from pyoverkiz.enums import OverkizCommand, OverkizCommandParam, OverkizState + +from homeassistant.components.water_heater import ( + STATE_ECO, + STATE_ELECTRIC, + STATE_HEAT_PUMP, + STATE_PERFORMANCE, + WaterHeaterEntity, + WaterHeaterEntityFeature, +) +from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature + +from ..entity import OverkizEntity + +DEFAULT_MIN_TEMP: float = 50.0 +DEFAULT_MAX_TEMP: float = 62.0 +MAX_BOOST_MODE_DURATION: int = 7 + +DHWP_AWAY_MODES = [ + OverkizCommandParam.ABSENCE, + OverkizCommandParam.AWAY, + OverkizCommandParam.FROSTPROTECTION, +] + + +class AtlanticDomesticHotWaterProductionV2IOComponent(OverkizEntity, WaterHeaterEntity): + """Representation of AtlanticDomesticHotWaterProductionV2IOComponent (io).""" + + _attr_temperature_unit = UnitOfTemperature.CELSIUS + _attr_supported_features = ( + WaterHeaterEntityFeature.TARGET_TEMPERATURE + | WaterHeaterEntityFeature.OPERATION_MODE + | WaterHeaterEntityFeature.AWAY_MODE + | WaterHeaterEntityFeature.ON_OFF + ) + _attr_operation_list = [ + STATE_ECO, + STATE_PERFORMANCE, + STATE_HEAT_PUMP, + STATE_ELECTRIC, + ] + + @property + def min_temp(self) -> float: + """Return the minimum temperature.""" + + min_temp = self.device.states[OverkizState.CORE_MINIMAL_TEMPERATURE_MANUAL_MODE] + if min_temp: + return cast(float, min_temp.value_as_float) + return DEFAULT_MIN_TEMP + + @property + def max_temp(self) -> float: + """Return the maximum temperature.""" + + max_temp = self.device.states[OverkizState.CORE_MAXIMAL_TEMPERATURE_MANUAL_MODE] + if max_temp: + return cast(float, max_temp.value_as_float) + return DEFAULT_MAX_TEMP + + @property + def current_temperature(self) -> float: + """Return the current temperature.""" + + return cast( + float, + self.executor.select_state( + OverkizState.IO_MIDDLE_WATER_TEMPERATURE, + ), + ) + + @property + def target_temperature(self) -> float: + """Return the temperature corresponding to the PRESET.""" + + return cast( + float, + self.executor.select_state(OverkizState.CORE_TARGET_TEMPERATURE), + ) + + async def async_set_temperature(self, **kwargs: Any) -> None: + """Set new temperature.""" + + temperature = kwargs.get(ATTR_TEMPERATURE) + await self.executor.async_execute_command( + OverkizCommand.SET_TARGET_TEMPERATURE, temperature, refresh_afterwards=False + ) + await self.executor.async_execute_command( + OverkizCommand.REFRESH_TARGET_TEMPERATURE, refresh_afterwards=False + ) + await self.coordinator.async_refresh() + + @property + def is_state_eco(self) -> bool: + """Return true if eco mode is on.""" + + return ( + self.executor.select_state(OverkizState.IO_DHW_MODE) + == OverkizCommandParam.MANUAL_ECO_ACTIVE + ) + + @property + def is_state_performance(self) -> bool: + """Return true if performance mode is on.""" + + return ( + self.executor.select_state(OverkizState.IO_DHW_MODE) + == OverkizCommandParam.AUTO_MODE + ) + + @property + def is_state_heat_pump(self) -> bool: + """Return true if heat pump mode is on.""" + + return ( + self.executor.select_state(OverkizState.IO_DHW_MODE) + == OverkizCommandParam.MANUAL_ECO_INACTIVE + ) + + @property + def is_away_mode_on(self) -> bool: + """Return true if away mode is on.""" + + away_mode_duration = cast( + str, self.executor.select_state(OverkizState.IO_AWAY_MODE_DURATION) + ) + # away_mode_duration 
can be either a Literal["always"] + if away_mode_duration == OverkizCommandParam.ALWAYS: + return True + + # Or an int of 0 to 7 days. But it still is a string. + if away_mode_duration.isdecimal() and int(away_mode_duration) > 0: + return True + + return False + + @property + def current_operation(self) -> str | None: + """Return current operation.""" + + # The Away Mode leaves the current operation unchanged + if self.is_boost_mode_on: + return STATE_ELECTRIC + + if self.is_state_eco: + return STATE_ECO + + if self.is_state_performance: + return STATE_PERFORMANCE + + if self.is_state_heat_pump: + return STATE_HEAT_PUMP + + return None + + @property + def is_boost_mode_on(self) -> bool: + """Return true if boost mode is on.""" + + return ( + cast( + int, + self.executor.select_state(OverkizState.CORE_BOOST_MODE_DURATION), + ) + > 0 + ) + + async def async_set_operation_mode(self, operation_mode: str) -> None: + """Set new operation mode.""" + + if operation_mode == STATE_ECO: + if self.is_boost_mode_on: + await self.async_turn_boost_mode_off(refresh_afterwards=False) + + if self.is_away_mode_on: + await self.async_turn_away_mode_off(refresh_afterwards=False) + + await self.executor.async_execute_command( + OverkizCommand.SET_DHW_MODE, + OverkizCommandParam.MANUAL_ECO_ACTIVE, + refresh_afterwards=False, + ) + # ECO changes the target temperature so we have to refresh it + await self.executor.async_execute_command( + OverkizCommand.REFRESH_TARGET_TEMPERATURE, refresh_afterwards=False + ) + await self.coordinator.async_refresh() + + elif operation_mode == STATE_PERFORMANCE: + if self.is_boost_mode_on: + await self.async_turn_boost_mode_off(refresh_afterwards=False) + if self.is_away_mode_on: + await self.async_turn_away_mode_off(refresh_afterwards=False) + + await self.executor.async_execute_command( + OverkizCommand.SET_DHW_MODE, + OverkizCommandParam.AUTO_MODE, + refresh_afterwards=False, + ) + + await self.coordinator.async_refresh() + + elif operation_mode == STATE_HEAT_PUMP: + refresh_target_temp = False + if self.is_state_performance: + # Switching from STATE_PERFORMANCE to STATE_HEAT_PUMP + # changes the target temperature and requires a target temperature refresh + refresh_target_temp = True + + if self.is_boost_mode_on: + await self.async_turn_boost_mode_off(refresh_afterwards=False) + if self.is_away_mode_on: + await self.async_turn_away_mode_off(refresh_afterwards=False) + + await self.executor.async_execute_command( + OverkizCommand.SET_DHW_MODE, + OverkizCommandParam.MANUAL_ECO_INACTIVE, + refresh_afterwards=False, + ) + + if refresh_target_temp: + await self.executor.async_execute_command( + OverkizCommand.REFRESH_TARGET_TEMPERATURE, + refresh_afterwards=False, + ) + + await self.coordinator.async_refresh() + + elif operation_mode == STATE_ELECTRIC: + if self.is_away_mode_on: + await self.async_turn_away_mode_off(refresh_afterwards=False) + if not self.is_boost_mode_on: + await self.async_turn_boost_mode_on(refresh_afterwards=False) + await self.coordinator.async_refresh() + + async def async_turn_away_mode_on(self, refresh_afterwards: bool = True) -> None: + """Turn away mode on.""" + + await self.executor.async_execute_command( + OverkizCommand.SET_CURRENT_OPERATING_MODE, + { + OverkizCommandParam.RELAUNCH: OverkizCommandParam.OFF, + OverkizCommandParam.ABSENCE: OverkizCommandParam.ON, + }, + refresh_afterwards=False, + ) + # Toggling the AWAY mode changes away mode duration so we have to refresh it + await self.executor.async_execute_command( + 
OverkizCommand.REFRESH_AWAY_MODE_DURATION, + refresh_afterwards=False, + ) + if refresh_afterwards: + await self.coordinator.async_refresh() + + async def async_turn_away_mode_off(self, refresh_afterwards: bool = True) -> None: + """Turn away mode off.""" + + await self.executor.async_execute_command( + OverkizCommand.SET_CURRENT_OPERATING_MODE, + { + OverkizCommandParam.RELAUNCH: OverkizCommandParam.OFF, + OverkizCommandParam.ABSENCE: OverkizCommandParam.OFF, + }, + refresh_afterwards=False, + ) + # Toggling the AWAY mode changes away mode duration so we have to refresh it + await self.executor.async_execute_command( + OverkizCommand.REFRESH_AWAY_MODE_DURATION, + refresh_afterwards=False, + ) + if refresh_afterwards: + await self.coordinator.async_refresh() + + async def async_turn_boost_mode_on(self, refresh_afterwards: bool = True) -> None: + """Turn boost mode on.""" + + refresh_target_temp = False + if self.is_state_performance: + # Switching from STATE_PERFORMANCE to BOOST requires a target temperature refresh + refresh_target_temp = True + + await self.executor.async_execute_command( + OverkizCommand.SET_BOOST_MODE_DURATION, + MAX_BOOST_MODE_DURATION, + refresh_afterwards=False, + ) + + await self.executor.async_execute_command( + OverkizCommand.SET_CURRENT_OPERATING_MODE, + { + OverkizCommandParam.RELAUNCH: OverkizCommandParam.ON, + OverkizCommandParam.ABSENCE: OverkizCommandParam.OFF, + }, + refresh_afterwards=False, + ) + + await self.executor.async_execute_command( + OverkizCommand.REFRESH_BOOST_MODE_DURATION, + refresh_afterwards=False, + ) + + if refresh_target_temp: + await self.executor.async_execute_command( + OverkizCommand.REFRESH_TARGET_TEMPERATURE, refresh_afterwards=False + ) + + if refresh_afterwards: + await self.coordinator.async_refresh() + + async def async_turn_boost_mode_off(self, refresh_afterwards: bool = True) -> None: + """Turn boost mode off.""" + + await self.executor.async_execute_command( + OverkizCommand.SET_CURRENT_OPERATING_MODE, + { + OverkizCommandParam.RELAUNCH: OverkizCommandParam.OFF, + OverkizCommandParam.ABSENCE: OverkizCommandParam.OFF, + }, + refresh_afterwards=False, + ) + # Toggling the BOOST mode changes boost mode duration so we have to refresh it + await self.executor.async_execute_command( + OverkizCommand.REFRESH_BOOST_MODE_DURATION, + refresh_afterwards=False, + ) + + if refresh_afterwards: + await self.coordinator.async_refresh() diff --git a/homeassistant/components/person/manifest.json b/homeassistant/components/person/manifest.json index 7f370be6fbe..0c1792e9277 100644 --- a/homeassistant/components/person/manifest.json +++ b/homeassistant/components/person/manifest.json @@ -1,7 +1,6 @@ { "domain": "person", "name": "Person", - "after_dependencies": ["device_tracker"], "codeowners": [], "dependencies": ["image_upload", "http"], "documentation": "https://www.home-assistant.io/integrations/person", diff --git a/homeassistant/components/pglab/cover.py b/homeassistant/components/pglab/cover.py new file mode 100644 index 00000000000..8385fd95ffa --- /dev/null +++ b/homeassistant/components/pglab/cover.py @@ -0,0 +1,107 @@ +"""PG LAB Electronics Cover.""" + +from __future__ import annotations + +from typing import Any + +from pypglab.device import Device as PyPGLabDevice +from pypglab.shutter import Shutter as PyPGLabShutter + +from homeassistant.components.cover import ( + CoverDeviceClass, + CoverEntity, + CoverEntityFeature, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from 
homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from .discovery import PGLabDiscovery +from .entity import PGLabEntity + +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up covers for device.""" + + @callback + def async_discover( + pglab_device: PyPGLabDevice, pglab_shutter: PyPGLabShutter + ) -> None: + """Discover and add a PG LAB Cover.""" + pglab_discovery = config_entry.runtime_data + pglab_cover = PGLabCover(pglab_discovery, pglab_device, pglab_shutter) + async_add_entities([pglab_cover]) + + # Register the callback to create the cover entity when discovered. + pglab_discovery = config_entry.runtime_data + await pglab_discovery.register_platform(hass, Platform.COVER, async_discover) + + +class PGLabCover(PGLabEntity, CoverEntity): + """A PGLab Cover.""" + + _attr_translation_key = "shutter" + + def __init__( + self, + pglab_discovery: PGLabDiscovery, + pglab_device: PyPGLabDevice, + pglab_shutter: PyPGLabShutter, + ) -> None: + """Initialize the Cover class.""" + + super().__init__( + pglab_discovery, + pglab_device, + pglab_shutter, + ) + + self._attr_unique_id = f"{pglab_device.id}_shutter{pglab_shutter.id}" + self._attr_translation_placeholders = {"shutter_id": pglab_shutter.id} + + self._shutter = pglab_shutter + + self._attr_device_class = CoverDeviceClass.SHUTTER + self._attr_supported_features = ( + CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE | CoverEntityFeature.STOP + ) + + async def async_open_cover(self, **kwargs: Any) -> None: + """Open the cover.""" + await self._shutter.open() + + async def async_close_cover(self, **kwargs: Any) -> None: + """Close cover.""" + await self._shutter.close() + + async def async_stop_cover(self, **kwargs: Any) -> None: + """Stop the cover.""" + await self._shutter.stop() + + @property + def is_closed(self) -> bool | None: + """Return if cover is closed.""" + if not self._shutter.state: + return None + return self._shutter.state == PyPGLabShutter.STATE_CLOSED + + @property + def is_closing(self) -> bool | None: + """Return if the cover is closing.""" + if not self._shutter.state: + return None + return self._shutter.state == PyPGLabShutter.STATE_CLOSING + + @property + def is_opening(self) -> bool | None: + """Return if the cover is opening.""" + if not self._shutter.state: + return None + return self._shutter.state == PyPGLabShutter.STATE_OPENING diff --git a/homeassistant/components/pglab/discovery.py b/homeassistant/components/pglab/discovery.py index e34f80a2e2d..c1d8653c17b 100644 --- a/homeassistant/components/pglab/discovery.py +++ b/homeassistant/components/pglab/discovery.py @@ -34,12 +34,14 @@ if TYPE_CHECKING: # Supported platforms. PLATFORMS = [ + Platform.COVER, Platform.SENSOR, Platform.SWITCH, ] # Used to create a new component entity. CREATE_NEW_ENTITY = { + Platform.COVER: "pglab_create_new_entity_cover", Platform.SENSOR: "pglab_create_new_entity_sensor", Platform.SWITCH: "pglab_create_new_entity_switch", } @@ -250,6 +252,13 @@ class PGLabDiscovery: ) self._discovered[pglab_device.id] = discovery_info + # Create all new cover entities. + for s in pglab_device.shutters: + # the HA entity is not yet created, send a message to create it + async_dispatcher_send( + hass, CREATE_NEW_ENTITY[Platform.COVER], pglab_device, s + ) + + # Create all new relay entities. 
for r in pglab_device.relays: # The HA entity is not yet created, send a message to create it. diff --git a/homeassistant/components/pglab/strings.json b/homeassistant/components/pglab/strings.json index 4fad408ad98..c6f80d12f09 100644 --- a/homeassistant/components/pglab/strings.json +++ b/homeassistant/components/pglab/strings.json @@ -15,6 +15,11 @@ } }, "entity": { + "cover": { + "shutter": { + "name": "Shutter {shutter_id}" + } + }, "switch": { "relay": { "name": "Relay {relay_id}" diff --git a/homeassistant/components/philips_js/light.py b/homeassistant/components/philips_js/light.py index bf15292335e..87e3323a30c 100644 --- a/homeassistant/components/philips_js/light.py +++ b/homeassistant/components/philips_js/light.py @@ -3,7 +3,7 @@ from __future__ import annotations from dataclasses import dataclass -from typing import Any +from typing import Any, cast from haphilipsjs import PhilipsTV from haphilipsjs.typing import AmbilightCurrentConfiguration @@ -328,7 +328,7 @@ class PhilipsTVLightEntity(PhilipsJsEntity, LightEntity): """Turn the bulb on.""" brightness = kwargs.get(ATTR_BRIGHTNESS, self.brightness) hs_color = kwargs.get(ATTR_HS_COLOR, self.hs_color) - attr_effect = kwargs.get(ATTR_EFFECT, self.effect) + attr_effect = cast(str, kwargs.get(ATTR_EFFECT, self.effect)) if not self._tv.on: raise HomeAssistantError("TV is not available") diff --git a/homeassistant/components/picnic/manifest.json b/homeassistant/components/picnic/manifest.json index 09f28da39a4..251964c15d0 100644 --- a/homeassistant/components/picnic/manifest.json +++ b/homeassistant/components/picnic/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/picnic", "iot_class": "cloud_polling", "loggers": ["python_picnic_api2"], - "requirements": ["python-picnic-api2==1.2.2"] + "requirements": ["python-picnic-api2==1.2.4"] } diff --git a/homeassistant/components/ping/strings.json b/homeassistant/components/ping/strings.json index ef9f74b4207..c301a1b277d 100644 --- a/homeassistant/components/ping/strings.json +++ b/homeassistant/components/ping/strings.json @@ -2,16 +2,16 @@ "entity": { "sensor": { "round_trip_time_avg": { - "name": "Round Trip Time Average" + "name": "Round-trip time average" }, "round_trip_time_max": { - "name": "Round Trip Time Maximum" + "name": "Round-trip time maximum" }, "round_trip_time_mdev": { - "name": "Round Trip Time Mean Deviation" + "name": "Round-trip time mean deviation" }, "round_trip_time_min": { - "name": "Round Trip Time Minimum" + "name": "Round-trip time minimum" } } }, diff --git a/homeassistant/components/point/config_flow.py b/homeassistant/components/point/config_flow.py index a0a51c7b9e6..b26ade8b725 100644 --- a/homeassistant/components/point/config_flow.py +++ b/homeassistant/components/point/config_flow.py @@ -11,6 +11,8 @@ from homeassistant.helpers.config_entry_oauth2_flow import AbstractOAuth2FlowHan from .const import DOMAIN +_LOGGER = logging.getLogger(__name__) + class OAuth2FlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): """Config flow to handle Minut Point OAuth2 authentication.""" @@ -56,7 +58,7 @@ class OAuth2FlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): if reauth_entry.unique_id is not None: self._abort_if_unique_id_mismatch(reason="wrong_account") - logging.debug("user_id: %s", user_id) + _LOGGER.debug("user_id: %s", user_id) return self.async_update_reload_and_abort( reauth_entry, data_updates=data, unique_id=user_id ) diff --git a/homeassistant/components/private_ble_device/manifest.json 
b/homeassistant/components/private_ble_device/manifest.json index 98a9f757585..810fce41e05 100644 --- a/homeassistant/components/private_ble_device/manifest.json +++ b/homeassistant/components/private_ble_device/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/private_ble_device", "iot_class": "local_push", - "requirements": ["bluetooth-data-tools==1.25.0"] + "requirements": ["bluetooth-data-tools==1.26.1"] } diff --git a/homeassistant/components/private_ble_device/strings.json b/homeassistant/components/private_ble_device/strings.json index c35775a4843..845a5d92bae 100644 --- a/homeassistant/components/private_ble_device/strings.json +++ b/homeassistant/components/private_ble_device/strings.json @@ -14,7 +14,7 @@ "irk_not_valid": "The key does not look like a valid IRK." }, "abort": { - "bluetooth_not_available": "At least one Bluetooth adapter or remote bluetooth proxy must be configured to track Private BLE Devices." + "bluetooth_not_available": "At least one Bluetooth adapter or remote Bluetooth proxy must be configured to track Private BLE Devices." } }, "entity": { diff --git a/homeassistant/components/progettihwsw/config_flow.py b/homeassistant/components/progettihwsw/config_flow.py index 2e5ea221dca..8818eff2d81 100644 --- a/homeassistant/components/progettihwsw/config_flow.py +++ b/homeassistant/components/progettihwsw/config_flow.py @@ -1,5 +1,6 @@ """Config flow for ProgettiHWSW Automation integration.""" +import logging from typing import TYPE_CHECKING, Any from ProgettiHWSW.ProgettiHWSWAPI import ProgettiHWSWAPI @@ -11,6 +12,8 @@ from homeassistant.exceptions import HomeAssistantError from .const import DOMAIN +_LOGGER = logging.getLogger(__name__) + DATA_SCHEMA = vol.Schema( {vol.Required("host"): str, vol.Required("port", default=80): int} ) @@ -86,7 +89,8 @@ class ProgettiHWSWConfigFlow(ConfigFlow, domain=DOMAIN): info = await validate_input(self.hass, user_input) except CannotConnect: errors["base"] = "cannot_connect" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: user_input.update(info) diff --git a/homeassistant/components/pterodactyl/__init__.py b/homeassistant/components/pterodactyl/__init__.py new file mode 100644 index 00000000000..33b3cc7576f --- /dev/null +++ b/homeassistant/components/pterodactyl/__init__.py @@ -0,0 +1,27 @@ +"""The Pterodactyl integration.""" + +from __future__ import annotations + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from .coordinator import PterodactylConfigEntry, PterodactylCoordinator + +_PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR] + + +async def async_setup_entry(hass: HomeAssistant, entry: PterodactylConfigEntry) -> bool: + """Set up Pterodactyl from a config entry.""" + coordinator = PterodactylCoordinator(hass, entry) + await coordinator.async_config_entry_first_refresh() + entry.runtime_data = coordinator + await hass.config_entries.async_forward_entry_setups(entry, _PLATFORMS) + + return True + + +async def async_unload_entry( + hass: HomeAssistant, entry: PterodactylConfigEntry +) -> bool: + """Unload a Pterodactyl config entry.""" + return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS) diff --git a/homeassistant/components/pterodactyl/api.py b/homeassistant/components/pterodactyl/api.py new file mode 100644 index 00000000000..38cb9809652 --- /dev/null +++ 
b/homeassistant/components/pterodactyl/api.py @@ -0,0 +1,120 @@ +"""API module of the Pterodactyl integration.""" + +from dataclasses import dataclass +import logging + +from pydactyl import PterodactylClient +from pydactyl.exceptions import ( + BadRequestError, + ClientConfigError, + PterodactylApiError, + PydactylError, +) + +from homeassistant.core import HomeAssistant + +_LOGGER = logging.getLogger(__name__) + + +class PterodactylConfigurationError(Exception): + """Raised when the configuration is invalid.""" + + +class PterodactylConnectionError(Exception): + """Raised when no data can be fetched from the server.""" + + +@dataclass +class PterodactylData: + """Data for the Pterodactyl server.""" + + name: str + uuid: str + identifier: str + state: str + memory_utilization: int + cpu_utilization: float + disk_utilization: int + network_rx_utilization: int + network_tx_utilization: int + uptime: int + + +class PterodactylAPI: + """Wrapper for Pterodactyl's API.""" + + pterodactyl: PterodactylClient | None + identifiers: list[str] + + def __init__(self, hass: HomeAssistant, host: str, api_key: str) -> None: + """Initialize the Pterodactyl API.""" + self.hass = hass + self.host = host + self.api_key = api_key + self.pterodactyl = None + self.identifiers = [] + + async def async_init(self): + """Initialize the Pterodactyl API.""" + self.pterodactyl = PterodactylClient(self.host, self.api_key) + + try: + paginated_response = await self.hass.async_add_executor_job( + self.pterodactyl.client.servers.list_servers + ) + except ClientConfigError as error: + raise PterodactylConfigurationError(error) from error + except ( + PydactylError, + BadRequestError, + PterodactylApiError, + ) as error: + raise PterodactylConnectionError(error) from error + else: + game_servers = paginated_response.collect() + for game_server in game_servers: + self.identifiers.append(game_server["attributes"]["identifier"]) + + _LOGGER.debug("Identifiers of Pterodactyl servers: %s", self.identifiers) + + def get_server_data(self, identifier: str) -> tuple[dict, dict]: + """Get all data from the Pterodactyl server.""" + server = self.pterodactyl.client.servers.get_server(identifier) # type: ignore[union-attr] + utilization = self.pterodactyl.client.servers.get_server_utilization( # type: ignore[union-attr] + identifier + ) + + return server, utilization + + async def async_get_data(self) -> dict[str, PterodactylData]: + """Update the data from all Pterodactyl servers.""" + data = {} + + for identifier in self.identifiers: + try: + server, utilization = await self.hass.async_add_executor_job( + self.get_server_data, identifier + ) + except ( + PydactylError, + BadRequestError, + PterodactylApiError, + ) as error: + raise PterodactylConnectionError(error) from error + else: + data[identifier] = PterodactylData( + name=server["name"], + uuid=server["uuid"], + identifier=identifier, + state=utilization["current_state"], + cpu_utilization=utilization["resources"]["cpu_absolute"], + memory_utilization=utilization["resources"]["memory_bytes"], + disk_utilization=utilization["resources"]["disk_bytes"], + network_rx_utilization=utilization["resources"]["network_rx_bytes"], + network_tx_utilization=utilization["resources"]["network_tx_bytes"], + uptime=utilization["resources"]["uptime"], + ) + + _LOGGER.debug("%s", data[identifier]) + + return data diff --git a/homeassistant/components/pterodactyl/binary_sensor.py b/homeassistant/components/pterodactyl/binary_sensor.py new file mode 100644 index 00000000000..e3615c47499 --- 
/dev/null +++ b/homeassistant/components/pterodactyl/binary_sensor.py @@ -0,0 +1,64 @@ +"""Binary sensor platform of the Pterodactyl integration.""" + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from .coordinator import PterodactylConfigEntry, PterodactylCoordinator +from .entity import PterodactylEntity + +KEY_STATUS = "status" + + +BINARY_SENSOR_DESCRIPTIONS = [ + BinarySensorEntityDescription( + key=KEY_STATUS, + translation_key=KEY_STATUS, + device_class=BinarySensorDeviceClass.RUNNING, + ), +] + +# Coordinator is used to centralize the data updates. +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: PterodactylConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up the Pterodactyl binary sensor platform.""" + coordinator = config_entry.runtime_data + + async_add_entities( + PterodactylBinarySensorEntity( + coordinator, identifier, description, config_entry + ) + for identifier in coordinator.api.identifiers + for description in BINARY_SENSOR_DESCRIPTIONS + ) + + +class PterodactylBinarySensorEntity(PterodactylEntity, BinarySensorEntity): + """Representation of a Pterodactyl binary sensor base entity.""" + + def __init__( + self, + coordinator: PterodactylCoordinator, + identifier: str, + description: BinarySensorEntityDescription, + config_entry: PterodactylConfigEntry, + ) -> None: + """Initialize binary sensor base entity.""" + super().__init__(coordinator, identifier, config_entry) + self.entity_description = description + self._attr_unique_id = f"{self.game_server_data.uuid}_{description.key}" + + @property + def is_on(self) -> bool: + """Return binary sensor state.""" + return self.game_server_data.state == "running" diff --git a/homeassistant/components/pterodactyl/config_flow.py b/homeassistant/components/pterodactyl/config_flow.py new file mode 100644 index 00000000000..a36069d2bb9 --- /dev/null +++ b/homeassistant/components/pterodactyl/config_flow.py @@ -0,0 +1,62 @@ +"""Config flow for the Pterodactyl integration.""" + +from __future__ import annotations + +import logging +from typing import Any + +import voluptuous as vol +from yarl import URL + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_API_KEY, CONF_URL + +from .api import ( + PterodactylAPI, + PterodactylConfigurationError, + PterodactylConnectionError, +) +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_URL = "http://localhost:8080" + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_URL, default=DEFAULT_URL): str, + vol.Required(CONF_API_KEY): str, + } +) + + +class PterodactylConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Pterodactyl.""" + + VERSION = 1 + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + errors: dict[str, str] = {} + if user_input is not None: + url = URL(user_input[CONF_URL]).human_repr() + api_key = user_input[CONF_API_KEY] + + self._async_abort_entries_match({CONF_URL: url}) + api = PterodactylAPI(self.hass, url, api_key) + + try: + await api.async_init() + except (PterodactylConfigurationError, PterodactylConnectionError): + errors["base"] = "cannot_connect" + except Exception: + 
_LOGGER.exception("Unexpected exception occurred during config flow") + errors["base"] = "unknown" + else: + return self.async_create_entry(title=url, data=user_input) + + return self.async_show_form( + step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors + ) diff --git a/homeassistant/components/pterodactyl/const.py b/homeassistant/components/pterodactyl/const.py new file mode 100644 index 00000000000..8cf4d0c3963 --- /dev/null +++ b/homeassistant/components/pterodactyl/const.py @@ -0,0 +1,3 @@ +"""Constants for the Pterodactyl integration.""" + +DOMAIN = "pterodactyl" diff --git a/homeassistant/components/pterodactyl/coordinator.py b/homeassistant/components/pterodactyl/coordinator.py new file mode 100644 index 00000000000..36456ade630 --- /dev/null +++ b/homeassistant/components/pterodactyl/coordinator.py @@ -0,0 +1,66 @@ +"""Data update coordinator of the Pterodactyl integration.""" + +from __future__ import annotations + +from datetime import timedelta +import logging + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_API_KEY, CONF_URL +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .api import ( + PterodactylAPI, + PterodactylConfigurationError, + PterodactylConnectionError, + PterodactylData, +) + +SCAN_INTERVAL = timedelta(seconds=60) + +_LOGGER = logging.getLogger(__name__) + +type PterodactylConfigEntry = ConfigEntry[PterodactylCoordinator] + + +class PterodactylCoordinator(DataUpdateCoordinator[dict[str, PterodactylData]]): + """Pterodactyl data update coordinator.""" + + config_entry: PterodactylConfigEntry + api: PterodactylAPI + + def __init__( + self, + hass: HomeAssistant, + config_entry: PterodactylConfigEntry, + ) -> None: + """Initialize coordinator instance.""" + + super().__init__( + hass=hass, + name=config_entry.data[CONF_URL], + config_entry=config_entry, + logger=_LOGGER, + update_interval=SCAN_INTERVAL, + ) + + async def _async_setup(self) -> None: + """Set up the Pterodactyl data coordinator.""" + self.api = PterodactylAPI( + hass=self.hass, + host=self.config_entry.data[CONF_URL], + api_key=self.config_entry.data[CONF_API_KEY], + ) + + try: + await self.api.async_init() + except PterodactylConfigurationError as error: + raise UpdateFailed(error) from error + + async def _async_update_data(self) -> dict[str, PterodactylData]: + """Get updated data from the Pterodactyl server.""" + try: + return await self.api.async_get_data() + except PterodactylConnectionError as error: + raise UpdateFailed(error) from error diff --git a/homeassistant/components/pterodactyl/entity.py b/homeassistant/components/pterodactyl/entity.py new file mode 100644 index 00000000000..49fd65af476 --- /dev/null +++ b/homeassistant/components/pterodactyl/entity.py @@ -0,0 +1,47 @@ +"""Base entity for the Pterodactyl integration.""" + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_URL +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .api import PterodactylData +from .const import DOMAIN +from .coordinator import PterodactylCoordinator + +MANUFACTURER = "Pterodactyl" + + +class PterodactylEntity(CoordinatorEntity[PterodactylCoordinator]): + """Representation of a Pterodactyl base entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: PterodactylCoordinator, + identifier: str, + 
config_entry: ConfigEntry, + ) -> None: + """Initialize base entity.""" + super().__init__(coordinator) + + self.identifier = identifier + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, identifier)}, + manufacturer=MANUFACTURER, + name=self.game_server_data.name, + model=self.game_server_data.name, + model_id=self.game_server_data.uuid, + configuration_url=f"{config_entry.data[CONF_URL]}/server/{identifier}", + ) + + @property + def available(self) -> bool: + """Return entity availability.""" + return super().available and self.identifier in self.coordinator.data + + @property + def game_server_data(self) -> PterodactylData: + """Return game server data.""" + return self.coordinator.data[self.identifier] diff --git a/homeassistant/components/pterodactyl/manifest.json b/homeassistant/components/pterodactyl/manifest.json new file mode 100644 index 00000000000..8ffa21dd186 --- /dev/null +++ b/homeassistant/components/pterodactyl/manifest.json @@ -0,0 +1,10 @@ +{ + "domain": "pterodactyl", + "name": "Pterodactyl", + "codeowners": ["@elmurato"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/pterodactyl", + "iot_class": "local_polling", + "quality_scale": "bronze", + "requirements": ["py-dactyl==2.0.4"] +} diff --git a/homeassistant/components/pterodactyl/quality_scale.yaml b/homeassistant/components/pterodactyl/quality_scale.yaml new file mode 100644 index 00000000000..dae3b9fa11a --- /dev/null +++ b/homeassistant/components/pterodactyl/quality_scale.yaml @@ -0,0 +1,93 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: Integration doesn't provide any service actions. + appropriate-polling: done + brands: done + common-modules: done + config-flow: done + config-flow-test-coverage: done + dependency-transparency: done + docs-actions: + status: exempt + comment: Integration doesn't provide any service actions. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: done + comment: Handled by coordinator. + entity-unique-id: + status: done + comment: Using config entry ID, as the dependency pydactyl doesn't provide unique information. + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: + status: done + comment: | + Raising ConfigEntryNotReady if the initialization isn't successful. + unique-config-entry: + status: done + comment: | + As there is no unique information available from the dependency pydactyl, + the server host is used to identify that the same service is already configured. + + # Silver + action-exceptions: + status: exempt + comment: Integration doesn't provide any service actions. + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: Integration doesn't support any configuration parameters. + docs-installation-parameters: todo + entity-unavailable: + status: done + comment: Handled by coordinator. + integration-owner: done + log-when-unavailable: + status: done + comment: Handled by coordinator. + parallel-updates: done + reauthentication-flow: todo + test-coverage: todo + + # Gold + devices: done + diagnostics: todo + discovery: + status: exempt + comment: No discovery possible. + discovery-update-info: + status: exempt + comment: | + No discovery possible. Users can use the (local or public) hostname instead of an IP address, + if static IP addresses cannot be configured. 
+ docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: todo + entity-category: done + entity-device-class: done + entity-disabled-by-default: todo + entity-translations: done + exception-translations: todo + icon-translations: todo + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: No repair use-cases for this integration. + stale-devices: todo + + # Platinum + async-dependency: todo + inject-websession: + status: exempt + comment: Integration isn't making any HTTP requests. + strict-typing: todo diff --git a/homeassistant/components/pterodactyl/strings.json b/homeassistant/components/pterodactyl/strings.json new file mode 100644 index 00000000000..a875c72ccd8 --- /dev/null +++ b/homeassistant/components/pterodactyl/strings.json @@ -0,0 +1,30 @@ +{ + "config": { + "step": { + "user": { + "data": { + "url": "[%key:common::config_flow::data::url%]", + "api_key": "[%key:common::config_flow::data::api_key%]" + }, + "data_description": { + "url": "The URL of your Pterodactyl server, including the protocol (http:// or https://) and optionally the port number.", + "api_key": "The account API key for accessing your Pterodactyl server." + } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + } + }, + "entity": { + "binary_sensor": { + "status": { + "name": "Status" + } + } + } +} diff --git a/homeassistant/components/purpleair/__init__.py b/homeassistant/components/purpleair/__init__.py index 2d4022946b2..78986b34351 100644 --- a/homeassistant/components/purpleair/__init__.py +++ b/homeassistant/components/purpleair/__init__.py @@ -2,37 +2,34 @@ from __future__ import annotations -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from .const import DOMAIN -from .coordinator import PurpleAirDataUpdateCoordinator - -PLATFORMS = [Platform.SENSOR] +from .const import PLATFORMS +from .coordinator import PurpleAirConfigEntry, PurpleAirDataUpdateCoordinator -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Set up PurpleAir from a config entry.""" - coordinator = PurpleAirDataUpdateCoordinator(hass, entry) +async def async_setup_entry(hass: HomeAssistant, entry: PurpleAirConfigEntry) -> bool: + """Set up PurpleAir config entry.""" + coordinator = PurpleAirDataUpdateCoordinator( + hass, + entry, + ) + entry.runtime_data = coordinator + await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - entry.async_on_unload(entry.add_update_listener(async_handle_entry_update)) + entry.async_on_unload(entry.add_update_listener(async_reload_entry)) return True -async def async_handle_entry_update(hass: HomeAssistant, entry: ConfigEntry) -> None: - """Handle an options update.""" +async def async_reload_entry(hass: HomeAssistant, entry: PurpleAirConfigEntry) -> None: + """Reload config entry.""" await hass.config_entries.async_reload(entry.entry_id) -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Unload a config entry.""" - if unload_ok := await 
hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok +async def async_unload_entry(hass: HomeAssistant, entry: PurpleAirConfigEntry) -> bool: + """Unload config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/purpleair/const.py b/homeassistant/components/purpleair/const.py index 5f1ec84d469..fcb928bd4f3 100644 --- a/homeassistant/components/purpleair/const.py +++ b/homeassistant/components/purpleair/const.py @@ -1,10 +1,13 @@ """Constants for the PurpleAir integration.""" import logging +from typing import Final -DOMAIN = "purpleair" +from homeassistant.const import Platform -LOGGER = logging.getLogger(__package__) +LOGGER: Final = logging.getLogger(__package__) +PLATFORMS: Final = [Platform.SENSOR] -CONF_READ_KEY = "read_key" -CONF_SENSOR_INDICES = "sensor_indices" +DOMAIN: Final[str] = "purpleair" + +CONF_SENSOR_INDICES: Final[str] = "sensor_indices" diff --git a/homeassistant/components/purpleair/coordinator.py b/homeassistant/components/purpleair/coordinator.py index f1511733cfa..4ed0c0340c6 100644 --- a/homeassistant/components/purpleair/coordinator.py +++ b/homeassistant/components/purpleair/coordinator.py @@ -46,12 +46,15 @@ SENSOR_FIELDS_TO_RETRIEVE = [ UPDATE_INTERVAL = timedelta(minutes=2) +type PurpleAirConfigEntry = ConfigEntry[PurpleAirDataUpdateCoordinator] + + class PurpleAirDataUpdateCoordinator(DataUpdateCoordinator[GetSensorsResponse]): """Define a PurpleAir-specific coordinator.""" - config_entry: ConfigEntry + config_entry: PurpleAirConfigEntry - def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None: + def __init__(self, hass: HomeAssistant, entry: PurpleAirConfigEntry) -> None: """Initialize.""" self._api = API( entry.data[CONF_API_KEY], diff --git a/homeassistant/components/purpleair/diagnostics.py b/homeassistant/components/purpleair/diagnostics.py index f7c44b7e9b2..71b83e277d3 100644 --- a/homeassistant/components/purpleair/diagnostics.py +++ b/homeassistant/components/purpleair/diagnostics.py @@ -5,7 +5,6 @@ from __future__ import annotations from typing import Any from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_API_KEY, CONF_LATITUDE, @@ -14,8 +13,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant -from .const import DOMAIN -from .coordinator import PurpleAirDataUpdateCoordinator +from .coordinator import PurpleAirConfigEntry CONF_TITLE = "title" @@ -30,14 +28,13 @@ TO_REDACT = { async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: PurpleAirConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - coordinator: PurpleAirDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] return async_redact_data( { "entry": entry.as_dict(), - "data": coordinator.data.model_dump(), + "data": entry.runtime_data.data.model_dump(), }, TO_REDACT, ) diff --git a/homeassistant/components/purpleair/entity.py b/homeassistant/components/purpleair/entity.py index 4f7be1874ed..410fdd9b942 100644 --- a/homeassistant/components/purpleair/entity.py +++ b/homeassistant/components/purpleair/entity.py @@ -7,13 +7,12 @@ from typing import Any from aiopurpleair.models.sensors import SensorModel -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_LATITUDE, 
ATTR_LONGITUDE, CONF_SHOW_ON_MAP from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN -from .coordinator import PurpleAirDataUpdateCoordinator +from .coordinator import PurpleAirConfigEntry, PurpleAirDataUpdateCoordinator class PurpleAirEntity(CoordinatorEntity[PurpleAirDataUpdateCoordinator]): @@ -23,12 +22,11 @@ class PurpleAirEntity(CoordinatorEntity[PurpleAirDataUpdateCoordinator]): def __init__( self, - coordinator: PurpleAirDataUpdateCoordinator, - entry: ConfigEntry, + entry: PurpleAirConfigEntry, sensor_index: int, ) -> None: """Initialize.""" - super().__init__(coordinator) + super().__init__(entry.runtime_data) self._sensor_index = sensor_index diff --git a/homeassistant/components/purpleair/sensor.py b/homeassistant/components/purpleair/sensor.py index bed1d878557..a85a23b6144 100644 --- a/homeassistant/components/purpleair/sensor.py +++ b/homeassistant/components/purpleair/sensor.py @@ -13,7 +13,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, PERCENTAGE, @@ -27,8 +26,8 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback -from .const import CONF_SENSOR_INDICES, DOMAIN -from .coordinator import PurpleAirDataUpdateCoordinator +from .const import CONF_SENSOR_INDICES +from .coordinator import PurpleAirConfigEntry from .entity import PurpleAirEntity CONCENTRATION_PARTICLES_PER_100_MILLILITERS = f"particles/100{UnitOfVolume.MILLILITERS}" @@ -165,13 +164,12 @@ SENSOR_DESCRIPTIONS = [ async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: PurpleAirConfigEntry, async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up PurpleAir sensors based on a config entry.""" - coordinator: PurpleAirDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] async_add_entities( - PurpleAirSensorEntity(coordinator, entry, sensor_index, description) + PurpleAirSensorEntity(entry, sensor_index, description) for sensor_index in entry.options[CONF_SENSOR_INDICES] for description in SENSOR_DESCRIPTIONS ) @@ -184,13 +182,12 @@ class PurpleAirSensorEntity(PurpleAirEntity, SensorEntity): def __init__( self, - coordinator: PurpleAirDataUpdateCoordinator, - entry: ConfigEntry, + entry: PurpleAirConfigEntry, sensor_index: int, description: PurpleAirSensorEntityDescription, ) -> None: """Initialize.""" - super().__init__(coordinator, entry, sensor_index) + super().__init__(entry, sensor_index) self._attr_unique_id = f"{self._sensor_index}-{description.key}" self.entity_description = description diff --git a/homeassistant/components/qbus/climate.py b/homeassistant/components/qbus/climate.py new file mode 100644 index 00000000000..57d97c046b7 --- /dev/null +++ b/homeassistant/components/qbus/climate.py @@ -0,0 +1,172 @@ +"""Support for Qbus thermostat.""" + +import logging +from typing import Any + +from qbusmqttapi.const import KEY_PROPERTIES_REGIME, KEY_PROPERTIES_SET_TEMPERATURE +from qbusmqttapi.discovery import QbusMqttOutput +from qbusmqttapi.state import QbusMqttThermoState, StateType + +from homeassistant.components.climate import ( + ClimateEntity, + ClimateEntityFeature, + HVACAction, + HVACMode, +) +from homeassistant.components.mqtt import ReceiveMessage, client as mqtt +from 
homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers.debounce import Debouncer +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from .const import DOMAIN +from .coordinator import QbusConfigEntry +from .entity import QbusEntity, add_new_outputs + +PARALLEL_UPDATES = 0 + +STATE_REQUEST_DELAY = 2 + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: QbusConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up climate entities.""" + + coordinator = entry.runtime_data + added_outputs: list[QbusMqttOutput] = [] + + def _check_outputs() -> None: + add_new_outputs( + coordinator, + added_outputs, + lambda output: output.type == "thermo", + QbusClimate, + async_add_entities, + ) + + _check_outputs() + entry.async_on_unload(coordinator.async_add_listener(_check_outputs)) + + +class QbusClimate(QbusEntity, ClimateEntity): + """Representation of a Qbus climate entity.""" + + _attr_hvac_modes = [HVACMode.HEAT] + _attr_supported_features = ( + ClimateEntityFeature.PRESET_MODE | ClimateEntityFeature.TARGET_TEMPERATURE + ) + _attr_temperature_unit = UnitOfTemperature.CELSIUS + + def __init__(self, mqtt_output: QbusMqttOutput) -> None: + """Initialize climate entity.""" + + super().__init__(mqtt_output) + + self._attr_hvac_action = HVACAction.IDLE + self._attr_hvac_mode = HVACMode.HEAT + + set_temp: dict[str, Any] = mqtt_output.properties.get( + KEY_PROPERTIES_SET_TEMPERATURE, {} + ) + current_regime: dict[str, Any] = mqtt_output.properties.get( + KEY_PROPERTIES_REGIME, {} + ) + + self._attr_min_temp: float = set_temp.get("min", 0) + self._attr_max_temp: float = set_temp.get("max", 35) + self._attr_target_temperature_step: float = set_temp.get("step", 0.5) + self._attr_preset_modes: list[str] = current_regime.get("enumValues", []) + self._attr_preset_mode: str = ( + self._attr_preset_modes[0] if len(self._attr_preset_modes) > 0 else "" + ) + + self._request_state_debouncer: Debouncer | None = None + + async def async_added_to_hass(self) -> None: + """Run when entity about to be added to hass.""" + self._request_state_debouncer = Debouncer( + self.hass, + _LOGGER, + cooldown=STATE_REQUEST_DELAY, + immediate=False, + function=self._async_request_state, + ) + await super().async_added_to_hass() + + async def async_set_preset_mode(self, preset_mode: str) -> None: + """Set new target preset mode.""" + + if preset_mode not in self._attr_preset_modes: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_preset", + translation_placeholders={ + "preset": preset_mode, + "options": ", ".join(self._attr_preset_modes), + }, + ) + + state = QbusMqttThermoState(id=self._mqtt_output.id, type=StateType.STATE) + state.write_regime(preset_mode) + + await self._async_publish_output_state(state) + + async def async_set_temperature(self, **kwargs: Any) -> None: + """Set new target temperature.""" + temperature = kwargs.get(ATTR_TEMPERATURE) + + if temperature is not None and isinstance(temperature, float): + state = QbusMqttThermoState(id=self._mqtt_output.id, type=StateType.STATE) + state.write_set_temperature(temperature) + + await self._async_publish_output_state(state) + + async def _state_received(self, msg: ReceiveMessage) -> None: + state = self._message_factory.parse_output_state( + QbusMqttThermoState, msg.payload + ) + 
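+        # A payload that cannot be parsed into a thermo state yields None and is ignored below.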
+ if state is None: + return + + if preset_mode := state.read_regime(): + self._attr_preset_mode = preset_mode + + if current_temperature := state.read_current_temperature(): + self._attr_current_temperature = current_temperature + + if target_temperature := state.read_set_temperature(): + self._attr_target_temperature = target_temperature + + self._set_hvac_action() + + # When the state type is "event", the payload only contains the changed + # property. Request the state to get the full payload. However, changing + # temperature step by step could cause a flood of state requests, so we're + # holding off a few seconds before requesting the full state. + if state.type == StateType.EVENT: + assert self._request_state_debouncer is not None + await self._request_state_debouncer.async_call() + + self.async_schedule_update_ha_state() + + def _set_hvac_action(self) -> None: + if self.target_temperature is None or self.current_temperature is None: + self._attr_hvac_action = HVACAction.IDLE + return + + self._attr_hvac_action = ( + HVACAction.HEATING + if self.target_temperature > self.current_temperature + else HVACAction.IDLE + ) + + async def _async_request_state(self) -> None: + request = self._message_factory.create_state_request([self._mqtt_output.id]) + await mqtt.async_publish(self.hass, request.topic, request.payload) diff --git a/homeassistant/components/qbus/const.py b/homeassistant/components/qbus/const.py index b9e42f13766..767a41f48cc 100644 --- a/homeassistant/components/qbus/const.py +++ b/homeassistant/components/qbus/const.py @@ -6,6 +6,7 @@ from homeassistant.const import Platform DOMAIN: Final = "qbus" PLATFORMS: list[Platform] = [ + Platform.CLIMATE, Platform.LIGHT, Platform.SWITCH, ] diff --git a/homeassistant/components/qbus/light.py b/homeassistant/components/qbus/light.py index 5ec76f5e807..3d2c763b8e3 100644 --- a/homeassistant/components/qbus/light.py +++ b/homeassistant/components/qbus/light.py @@ -51,7 +51,7 @@ class QbusLight(QbusEntity, LightEntity): super().__init__(mqtt_output) - self._set_state() + self._set_state(0) async def async_turn_on(self, **kwargs: Any) -> None: """Turn the entity on.""" @@ -74,7 +74,6 @@ class QbusLight(QbusEntity, LightEntity): state.write_percentage(percentage) await self._async_publish_output_state(state) - self._set_state(percentage=percentage, on=on) async def async_turn_off(self, **kwargs: Any) -> None: """Turn the entity off.""" @@ -82,7 +81,6 @@ class QbusLight(QbusEntity, LightEntity): state.write_on_off(on=False) await self._async_publish_output_state(state) - self._set_state(on=False) async def _state_received(self, msg: ReceiveMessage) -> None: output = self._message_factory.parse_output_state( @@ -91,20 +89,9 @@ class QbusLight(QbusEntity, LightEntity): if output is not None: percentage = round(output.read_percentage()) - self._set_state(percentage=percentage) + self._set_state(percentage) self.async_schedule_update_ha_state() - def _set_state( - self, *, percentage: int | None = None, on: bool | None = None - ) -> None: - if percentage is None: - # When turning on without brightness, we don't know the desired - # brightness. It will be set during _state_received(). 
- if on is True: - self._attr_is_on = True - else: - self._attr_is_on = False - self._attr_brightness = 0 - else: - self._attr_is_on = percentage > 0 - self._attr_brightness = value_to_brightness((1, 100), percentage) + def _set_state(self, percentage: int = 0) -> None: + self._attr_is_on = percentage > 0 + self._attr_brightness = value_to_brightness((1, 100), percentage) diff --git a/homeassistant/components/qbus/strings.json b/homeassistant/components/qbus/strings.json index e6df18c393c..f308c5b3519 100644 --- a/homeassistant/components/qbus/strings.json +++ b/homeassistant/components/qbus/strings.json @@ -15,5 +15,10 @@ "error": { "no_controller": "No controllers were found" } + }, + "exceptions": { + "invalid_preset": { + "message": "Preset mode \"{preset}\" is not valid. Valid preset modes are: {options}." + } } } diff --git a/homeassistant/components/qbus/switch.py b/homeassistant/components/qbus/switch.py index 002ad43e904..e1feccf4450 100644 --- a/homeassistant/components/qbus/switch.py +++ b/homeassistant/components/qbus/switch.py @@ -57,7 +57,6 @@ class QbusSwitch(QbusEntity, SwitchEntity): state.write_value(True) await self._async_publish_output_state(state) - self._attr_is_on = True async def async_turn_off(self, **kwargs: Any) -> None: """Turn the entity off.""" @@ -65,7 +64,6 @@ class QbusSwitch(QbusEntity, SwitchEntity): state.write_value(False) await self._async_publish_output_state(state) - self._attr_is_on = False async def _state_received(self, msg: ReceiveMessage) -> None: output = self._message_factory.parse_output_state( diff --git a/homeassistant/components/qnap/config_flow.py b/homeassistant/components/qnap/config_flow.py index 75f41a27f69..504883b55e9 100644 --- a/homeassistant/components/qnap/config_flow.py +++ b/homeassistant/components/qnap/config_flow.py @@ -70,8 +70,8 @@ class QnapConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" except TypeError: errors["base"] = "invalid_auth" - except Exception as error: # noqa: BLE001 - _LOGGER.error(error) + except Exception: + _LOGGER.exception("Unexpected error") errors["base"] = "unknown" else: unique_id = stats["system"]["serial_number"] diff --git a/homeassistant/components/rabbitair/config_flow.py b/homeassistant/components/rabbitair/config_flow.py index f4487a73b58..43959e1e42c 100644 --- a/homeassistant/components/rabbitair/config_flow.py +++ b/homeassistant/components/rabbitair/config_flow.py @@ -74,8 +74,8 @@ class RabbitAirConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "invalid_host" except TimeoutConnect: errors["base"] = "timeout_connect" - except Exception as err: # noqa: BLE001 - _LOGGER.debug("Unexpected exception: %s", err) + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: user_input[CONF_MAC] = info["mac"] diff --git a/homeassistant/components/rachio/strings.json b/homeassistant/components/rachio/strings.json index 308403d805d..d51a1d5f920 100644 --- a/homeassistant/components/rachio/strings.json +++ b/homeassistant/components/rachio/strings.json @@ -3,7 +3,7 @@ "step": { "user": { "title": "Connect to your Rachio device", - "description": "You will need the API Key from https://app.rach.io/. Go to Settings, then select 'GET API KEY'.", + "description": "You will need the API key from https://app.rach.io/. 
Go to Settings, then select 'GET API KEY'.", "data": { "api_key": "[%key:common::config_flow::data::api_key%]" } @@ -70,7 +70,7 @@ }, "start_watering": { "name": "Start watering", - "description": "Start a single zone, a schedule or any number of smart hose timers.", + "description": "Starts a single zone, a schedule or any number of smart hose timers.", "fields": { "duration": { "name": "Duration", @@ -80,7 +80,7 @@ }, "pause_watering": { "name": "Pause watering", - "description": "Pause any currently running zones or schedules.", + "description": "Pauses any currently running zones or schedules.", "fields": { "devices": { "name": "Devices", @@ -94,7 +94,7 @@ }, "resume_watering": { "name": "Resume watering", - "description": "Resume any paused zone runs or schedules.", + "description": "Resumes any paused zone runs or schedules.", "fields": { "devices": { "name": "[%key:component::rachio::services::pause_watering::fields::devices::name%]", @@ -104,7 +104,7 @@ }, "stop_watering": { "name": "Stop watering", - "description": "Stop any currently running zones or schedules.", + "description": "Stops any currently running zones or schedules.", "fields": { "devices": { "name": "[%key:component::rachio::services::pause_watering::fields::devices::name%]", diff --git a/homeassistant/components/recorder/auto_repairs/schema.py b/homeassistant/components/recorder/auto_repairs/schema.py index 1373f466bc2..cf3addd4f20 100644 --- a/homeassistant/components/recorder/auto_repairs/schema.py +++ b/homeassistant/components/recorder/auto_repairs/schema.py @@ -175,7 +175,7 @@ def _validate_db_schema_precision( # Mark the session as read_only to ensure that the test data is not committed # to the database and we always rollback when the scope is exited with session_scope(session=instance.get_session(), read_only=True) as session: - db_object = table_object(**{column: PRECISE_NUMBER for column in columns}) + db_object = table_object(**dict.fromkeys(columns, PRECISE_NUMBER)) table = table_object.__tablename__ try: session.add(db_object) @@ -184,7 +184,7 @@ def _validate_db_schema_precision( _check_columns( schema_errors=schema_errors, stored={column: getattr(db_object, column) for column in columns}, - expected={column: PRECISE_NUMBER for column in columns}, + expected=dict.fromkeys(columns, PRECISE_NUMBER), columns=columns, table_name=table, supports="double precision", diff --git a/homeassistant/components/recorder/const.py b/homeassistant/components/recorder/const.py index 36ff63a0496..4797eecda0f 100644 --- a/homeassistant/components/recorder/const.py +++ b/homeassistant/components/recorder/const.py @@ -54,6 +54,7 @@ CONTEXT_ID_AS_BINARY_SCHEMA_VERSION = 36 EVENT_TYPE_IDS_SCHEMA_VERSION = 37 STATES_META_SCHEMA_VERSION = 38 LAST_REPORTED_SCHEMA_VERSION = 43 +CIRCULAR_MEAN_SCHEMA_VERSION = 49 LEGACY_STATES_EVENT_ID_INDEX_SCHEMA_VERSION = 28 LEGACY_STATES_EVENT_FOREIGN_KEYS_FIXED_SCHEMA_VERSION = 43 diff --git a/homeassistant/components/recorder/core.py b/homeassistant/components/recorder/core.py index 62afa0e7b04..7b8043b9201 100644 --- a/homeassistant/components/recorder/core.py +++ b/homeassistant/components/recorder/core.py @@ -79,7 +79,13 @@ from .db_schema import ( StatisticsShortTerm, ) from .executor import DBInterruptibleThreadPoolExecutor -from .models import DatabaseEngine, StatisticData, StatisticMetaData, UnsupportedDialect +from .models import ( + DatabaseEngine, + StatisticData, + StatisticMeanType, + StatisticMetaData, + UnsupportedDialect, +) from .pool import POOL_SIZE, MutexPool, RecorderPool 
from .table_managers.event_data import EventDataManager from .table_managers.event_types import EventTypeManager @@ -611,6 +617,17 @@ class Recorder(threading.Thread): table: type[Statistics | StatisticsShortTerm], ) -> None: """Schedule import of statistics.""" + if "mean_type" not in metadata: + # Backwards compatibility for old metadata format + # Can be removed after 2026.4 + metadata["mean_type"] = ( # type: ignore[unreachable] + StatisticMeanType.ARITHMETIC + if metadata.get("has_mean") + else StatisticMeanType.NONE + ) + # Remove deprecated has_mean as it's not needed anymore in core + metadata.pop("has_mean", None) + self.queue_task(ImportStatisticsTask(metadata, stats, table)) @callback diff --git a/homeassistant/components/recorder/db_schema.py b/homeassistant/components/recorder/db_schema.py index d1a2405406e..6566cadf64c 100644 --- a/homeassistant/components/recorder/db_schema.py +++ b/homeassistant/components/recorder/db_schema.py @@ -58,6 +58,7 @@ from .const import ALL_DOMAIN_EXCLUDE_ATTRS, SupportedDialect from .models import ( StatisticData, StatisticDataTimestamp, + StatisticMeanType, StatisticMetaData, bytes_to_ulid_or_none, bytes_to_uuid_hex_or_none, @@ -77,7 +78,7 @@ class LegacyBase(DeclarativeBase): """Base class for tables, used for schema migration.""" -SCHEMA_VERSION = 48 +SCHEMA_VERSION = 50 _LOGGER = logging.getLogger(__name__) @@ -203,11 +204,11 @@ UINT_32_TYPE = BigInteger().with_variant( "mariadb", ) JSON_VARIANT_CAST = Text().with_variant( - postgresql.JSON(none_as_null=True), # type: ignore[no-untyped-call] + postgresql.JSON(none_as_null=True), "postgresql", ) JSONB_VARIANT_CAST = Text().with_variant( - postgresql.JSONB(none_as_null=True), # type: ignore[no-untyped-call] + postgresql.JSONB(none_as_null=True), "postgresql", ) DATETIME_TYPE = ( @@ -719,6 +720,7 @@ class StatisticsBase: start: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN) start_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE, index=True) mean: Mapped[float | None] = mapped_column(DOUBLE_TYPE) + mean_weight: Mapped[float | None] = mapped_column(DOUBLE_TYPE) min: Mapped[float | None] = mapped_column(DOUBLE_TYPE) max: Mapped[float | None] = mapped_column(DOUBLE_TYPE) last_reset: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN) @@ -740,6 +742,7 @@ class StatisticsBase: start=None, start_ts=stats["start"].timestamp(), mean=stats.get("mean"), + mean_weight=stats.get("mean_weight"), min=stats.get("min"), max=stats.get("max"), last_reset=None, @@ -763,6 +766,7 @@ class StatisticsBase: start=None, start_ts=stats["start_ts"], mean=stats.get("mean"), + mean_weight=stats.get("mean_weight"), min=stats.get("min"), max=stats.get("max"), last_reset=None, @@ -848,6 +852,9 @@ class _StatisticsMeta: has_mean: Mapped[bool | None] = mapped_column(Boolean) has_sum: Mapped[bool | None] = mapped_column(Boolean) name: Mapped[str | None] = mapped_column(String(255)) + mean_type: Mapped[StatisticMeanType] = mapped_column( + SmallInteger, nullable=False, default=StatisticMeanType.NONE.value + ) # See StatisticMeanType @staticmethod def from_meta(meta: StatisticMetaData) -> StatisticsMeta: diff --git a/homeassistant/components/recorder/manifest.json b/homeassistant/components/recorder/manifest.json index 3ba36ab86c0..f5336e2a85b 100644 --- a/homeassistant/components/recorder/manifest.json +++ b/homeassistant/components/recorder/manifest.json @@ -7,7 +7,7 @@ "iot_class": "local_push", "quality_scale": "internal", "requirements": [ - "SQLAlchemy==2.0.38", + 
"SQLAlchemy==2.0.39", "fnv-hash-fast==1.4.0", "psutil-home-assistant==0.0.1" ] diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index 3aa12f2b1f9..58af15c2aa7 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -9,7 +9,7 @@ from dataclasses import dataclass, replace as dataclass_replace from datetime import timedelta import logging from time import time -from typing import TYPE_CHECKING, Any, cast, final +from typing import TYPE_CHECKING, Any, TypedDict, cast, final from uuid import UUID import sqlalchemy @@ -81,7 +81,7 @@ from .db_schema import ( StatisticsRuns, StatisticsShortTerm, ) -from .models import process_timestamp +from .models import StatisticMeanType, process_timestamp from .models.time import datetime_to_timestamp_or_none from .queries import ( batch_cleanup_entity_ids, @@ -144,24 +144,32 @@ class _ColumnTypesForDialect: big_int_type: str timestamp_type: str context_bin_type: str + small_int_type: str + double_type: str _MYSQL_COLUMN_TYPES = _ColumnTypesForDialect( big_int_type="INTEGER(20)", timestamp_type=DOUBLE_PRECISION_TYPE_SQL, context_bin_type=f"BLOB({CONTEXT_ID_BIN_MAX_LENGTH})", + small_int_type="SMALLINT", + double_type=DOUBLE_PRECISION_TYPE_SQL, ) _POSTGRESQL_COLUMN_TYPES = _ColumnTypesForDialect( big_int_type="INTEGER", timestamp_type=DOUBLE_PRECISION_TYPE_SQL, context_bin_type="BYTEA", + small_int_type="SMALLINT", + double_type=DOUBLE_PRECISION_TYPE_SQL, ) _SQLITE_COLUMN_TYPES = _ColumnTypesForDialect( big_int_type="INTEGER", timestamp_type="FLOAT", context_bin_type="BLOB", + small_int_type="INTEGER", + double_type="FLOAT", ) _COLUMN_TYPES_FOR_DIALECT: dict[SupportedDialect | None, _ColumnTypesForDialect] = { @@ -712,6 +720,11 @@ def _modify_columns( raise +class _FKAlterDict(TypedDict): + old_fk: ForeignKeyConstraint + columns: list[str] + + def _update_states_table_with_foreign_key_options( session_maker: Callable[[], Session], engine: Engine ) -> None: @@ -729,7 +742,7 @@ def _update_states_table_with_foreign_key_options( inspector = sqlalchemy.inspect(engine) tmp_states_table = Table(TABLE_STATES, MetaData()) - alters = [ + alters: list[_FKAlterDict] = [ { "old_fk": ForeignKeyConstraint( (), (), name=foreign_key["name"], table=tmp_states_table @@ -755,14 +768,14 @@ def _update_states_table_with_foreign_key_options( with session_scope(session=session_maker()) as session: try: connection = session.connection() - connection.execute(DropConstraint(alter["old_fk"])) # type: ignore[no-untyped-call] + connection.execute(DropConstraint(alter["old_fk"])) for fkc in states_key_constraints: if fkc.column_keys == alter["columns"]: # AddConstraint mutates the constraint passed to it, we need to # undo that to avoid changing the behavior of the table schema. 
# https://github.com/sqlalchemy/sqlalchemy/blob/96f1172812f858fead45cdc7874abac76f45b339/lib/sqlalchemy/sql/ddl.py#L746-L748 create_rule = fkc._create_rule # noqa: SLF001 - add_constraint = AddConstraint(fkc) # type: ignore[no-untyped-call] + add_constraint = AddConstraint(fkc) fkc._create_rule = create_rule # noqa: SLF001 connection.execute(add_constraint) except (InternalError, OperationalError): @@ -800,7 +813,7 @@ def _drop_foreign_key_constraints( with session_scope(session=session_maker()) as session: try: connection = session.connection() - connection.execute(DropConstraint(drop)) # type: ignore[no-untyped-call] + connection.execute(DropConstraint(drop)) except (InternalError, OperationalError): _LOGGER.exception( "Could not drop foreign constraints in %s table on %s", @@ -845,7 +858,7 @@ def _restore_foreign_key_constraints( # undo that to avoid changing the behavior of the table schema. # https://github.com/sqlalchemy/sqlalchemy/blob/96f1172812f858fead45cdc7874abac76f45b339/lib/sqlalchemy/sql/ddl.py#L746-L748 create_rule = constraint._create_rule # noqa: SLF001 - add_constraint = AddConstraint(constraint) # type: ignore[no-untyped-call] + add_constraint = AddConstraint(constraint) constraint._create_rule = create_rule # noqa: SLF001 try: _add_constraint(session_maker, add_constraint, table, column) @@ -1988,6 +2001,42 @@ class _SchemaVersion48Migrator(_SchemaVersionMigrator, target_version=48): _migrate_columns_to_timestamp(self.instance, self.session_maker, self.engine) +class _SchemaVersion49Migrator(_SchemaVersionMigrator, target_version=49): + def _apply_update(self) -> None: + """Version specific update method.""" + _add_columns( + self.session_maker, + "statistics_meta", + [ + f"mean_type {self.column_types.small_int_type} NOT NULL DEFAULT {StatisticMeanType.NONE.value}" + ], + ) + + for table in ("statistics", "statistics_short_term"): + _add_columns( + self.session_maker, + table, + [f"mean_weight {self.column_types.double_type}"], + ) + + with session_scope(session=self.session_maker()) as session: + connection = session.connection() + connection.execute( + text( + "UPDATE statistics_meta SET mean_type=:mean_type WHERE has_mean=true" + ), + {"mean_type": StatisticMeanType.ARITHMETIC.value}, + ) + + +class _SchemaVersion50Migrator(_SchemaVersionMigrator, target_version=50): + def _apply_update(self) -> None: + """Version specific update method.""" + with session_scope(session=self.session_maker()) as session: + connection = session.connection() + connection.execute(text("UPDATE statistics_meta SET has_mean=NULL")) + + def _migrate_statistics_columns_to_timestamp_removing_duplicates( hass: HomeAssistant, instance: Recorder, diff --git a/homeassistant/components/recorder/models/__init__.py b/homeassistant/components/recorder/models/__init__.py index ea7a6c86854..8f76982a900 100644 --- a/homeassistant/components/recorder/models/__init__.py +++ b/homeassistant/components/recorder/models/__init__.py @@ -17,6 +17,7 @@ from .statistics import ( RollingWindowStatisticPeriod, StatisticData, StatisticDataTimestamp, + StatisticMeanType, StatisticMetaData, StatisticPeriod, StatisticResult, @@ -37,6 +38,7 @@ __all__ = [ "RollingWindowStatisticPeriod", "StatisticData", "StatisticDataTimestamp", + "StatisticMeanType", "StatisticMetaData", "StatisticPeriod", "StatisticResult", diff --git a/homeassistant/components/recorder/models/statistics.py b/homeassistant/components/recorder/models/statistics.py index ad4d82067c4..08da12d6b17 100644 --- 
a/homeassistant/components/recorder/models/statistics.py +++ b/homeassistant/components/recorder/models/statistics.py @@ -3,7 +3,8 @@ from __future__ import annotations from datetime import datetime, timedelta -from typing import Literal, TypedDict +from enum import IntEnum +from typing import Literal, NotRequired, TypedDict class StatisticResult(TypedDict): @@ -36,6 +37,7 @@ class StatisticMixIn(TypedDict, total=False): min: float max: float mean: float + mean_weight: float class StatisticData(StatisticDataBase, StatisticMixIn, total=False): @@ -50,10 +52,20 @@ class StatisticDataTimestamp(StatisticDataTimestampBase, StatisticMixIn, total=F last_reset_ts: float | None +class StatisticMeanType(IntEnum): + """Statistic mean type.""" + + NONE = 0 + ARITHMETIC = 1 + CIRCULAR = 2 + + class StatisticMetaData(TypedDict): """Statistic meta data class.""" - has_mean: bool + # has_mean is deprecated, use mean_type instead. has_mean will be removed in 2026.4 + has_mean: NotRequired[bool] + mean_type: StatisticMeanType has_sum: bool name: str | None source: str diff --git a/homeassistant/components/recorder/statistics.py b/homeassistant/components/recorder/statistics.py index 97fe73c54fe..2507a66899e 100644 --- a/homeassistant/components/recorder/statistics.py +++ b/homeassistant/components/recorder/statistics.py @@ -9,12 +9,23 @@ from datetime import datetime, timedelta from functools import lru_cache, partial from itertools import chain, groupby import logging +import math from operator import itemgetter import re from time import time as time_time -from typing import TYPE_CHECKING, Any, Literal, TypedDict, cast +from typing import TYPE_CHECKING, Any, Literal, Required, TypedDict, cast -from sqlalchemy import Select, and_, bindparam, func, lambda_stmt, select, text +from sqlalchemy import ( + Label, + Select, + and_, + bindparam, + case, + func, + lambda_stmt, + select, + text, +) from sqlalchemy.engine.row import Row from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.orm.session import Session @@ -29,6 +40,7 @@ from homeassistant.helpers.singleton import singleton from homeassistant.helpers.typing import UNDEFINED, UndefinedType from homeassistant.util import dt as dt_util from homeassistant.util.collection import chunked_or_all +from homeassistant.util.enum import try_parse_enum from homeassistant.util.unit_conversion import ( AreaConverter, BaseUnitConverter, @@ -74,6 +86,7 @@ from .db_schema import ( from .models import ( StatisticData, StatisticDataTimestamp, + StatisticMeanType, StatisticMetaData, StatisticResult, datetime_to_timestamp_or_none, @@ -113,11 +126,54 @@ QUERY_STATISTICS_SHORT_TERM = ( StatisticsShortTerm.sum, ) + +def query_circular_mean(table: type[StatisticsBase]) -> tuple[Label, Label]: + """Return the sqlalchemy function for circular mean and the mean_weight. + + The result must be taken modulo 360 to normalize it to the range [0, 360). + """ + # Postgres doesn't support modulo for double precision and + # the other dbs return the remainder instead of the modulo, + # meaning negative values are possible. For this reason + # we need to normalize the result to be in the range [0, 360) + # in Python.
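+ # For example, the circular mean of 350° and 10° is 0°, not the arithmetic mean of 180°.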
+ # https://en.wikipedia.org/wiki/Circular_mean + radians = func.radians(table.mean) + weight = func.sqrt( + func.power(func.sum(func.sin(radians) * table.mean_weight), 2) + + func.power(func.sum(func.cos(radians) * table.mean_weight), 2) + ) + return ( + func.degrees( + func.atan2(func.sum(func.sin(radians)), func.sum(func.cos(radians))) + ).label("mean"), + weight.label("mean_weight"), + ) + + QUERY_STATISTICS_SUMMARY_MEAN = ( StatisticsShortTerm.metadata_id, - func.avg(StatisticsShortTerm.mean), func.min(StatisticsShortTerm.min), func.max(StatisticsShortTerm.max), + case( + ( + StatisticsMeta.mean_type == StatisticMeanType.ARITHMETIC, + func.avg(StatisticsShortTerm.mean), + ), + ( + StatisticsMeta.mean_type == StatisticMeanType.CIRCULAR, + query_circular_mean(StatisticsShortTerm)[0], + ), + else_=None, + ), + case( + ( + StatisticsMeta.mean_type == StatisticMeanType.CIRCULAR, + query_circular_mean(StatisticsShortTerm)[1], + ), + else_=None, + ), + StatisticsMeta.mean_type, ) QUERY_STATISTICS_SUMMARY_SUM = ( @@ -136,31 +192,28 @@ QUERY_STATISTICS_SUMMARY_SUM = ( STATISTIC_UNIT_TO_UNIT_CONVERTER: dict[str | None, type[BaseUnitConverter]] = { - **{unit: AreaConverter for unit in AreaConverter.VALID_UNITS}, - **{ - unit: BloodGlucoseConcentrationConverter - for unit in BloodGlucoseConcentrationConverter.VALID_UNITS - }, - **{unit: ConductivityConverter for unit in ConductivityConverter.VALID_UNITS}, - **{unit: DataRateConverter for unit in DataRateConverter.VALID_UNITS}, - **{unit: DistanceConverter for unit in DistanceConverter.VALID_UNITS}, - **{unit: DurationConverter for unit in DurationConverter.VALID_UNITS}, - **{unit: ElectricCurrentConverter for unit in ElectricCurrentConverter.VALID_UNITS}, - **{ - unit: ElectricPotentialConverter - for unit in ElectricPotentialConverter.VALID_UNITS - }, - **{unit: EnergyConverter for unit in EnergyConverter.VALID_UNITS}, - **{unit: EnergyDistanceConverter for unit in EnergyDistanceConverter.VALID_UNITS}, - **{unit: InformationConverter for unit in InformationConverter.VALID_UNITS}, - **{unit: MassConverter for unit in MassConverter.VALID_UNITS}, - **{unit: PowerConverter for unit in PowerConverter.VALID_UNITS}, - **{unit: PressureConverter for unit in PressureConverter.VALID_UNITS}, - **{unit: SpeedConverter for unit in SpeedConverter.VALID_UNITS}, - **{unit: TemperatureConverter for unit in TemperatureConverter.VALID_UNITS}, - **{unit: UnitlessRatioConverter for unit in UnitlessRatioConverter.VALID_UNITS}, - **{unit: VolumeConverter for unit in VolumeConverter.VALID_UNITS}, - **{unit: VolumeFlowRateConverter for unit in VolumeFlowRateConverter.VALID_UNITS}, + **dict.fromkeys(AreaConverter.VALID_UNITS, AreaConverter), + **dict.fromkeys( + BloodGlucoseConcentrationConverter.VALID_UNITS, + BloodGlucoseConcentrationConverter, + ), + **dict.fromkeys(ConductivityConverter.VALID_UNITS, ConductivityConverter), + **dict.fromkeys(DataRateConverter.VALID_UNITS, DataRateConverter), + **dict.fromkeys(DistanceConverter.VALID_UNITS, DistanceConverter), + **dict.fromkeys(DurationConverter.VALID_UNITS, DurationConverter), + **dict.fromkeys(ElectricCurrentConverter.VALID_UNITS, ElectricCurrentConverter), + **dict.fromkeys(ElectricPotentialConverter.VALID_UNITS, ElectricPotentialConverter), + **dict.fromkeys(EnergyConverter.VALID_UNITS, EnergyConverter), + **dict.fromkeys(EnergyDistanceConverter.VALID_UNITS, EnergyDistanceConverter), + **dict.fromkeys(InformationConverter.VALID_UNITS, InformationConverter), + **dict.fromkeys(MassConverter.VALID_UNITS, 
MassConverter), + **dict.fromkeys(PowerConverter.VALID_UNITS, PowerConverter), + **dict.fromkeys(PressureConverter.VALID_UNITS, PressureConverter), + **dict.fromkeys(SpeedConverter.VALID_UNITS, SpeedConverter), + **dict.fromkeys(TemperatureConverter.VALID_UNITS, TemperatureConverter), + **dict.fromkeys(UnitlessRatioConverter.VALID_UNITS, UnitlessRatioConverter), + **dict.fromkeys(VolumeConverter.VALID_UNITS, VolumeConverter), + **dict.fromkeys(VolumeFlowRateConverter.VALID_UNITS, VolumeFlowRateConverter), } @@ -183,6 +236,24 @@ def mean(values: list[float]) -> float | None: return sum(values) / len(values) +DEG_TO_RAD = math.pi / 180 +RAD_TO_DEG = 180 / math.pi + + +def weighted_circular_mean(values: Iterable[tuple[float, float]]) -> float: + """Return the weighted circular mean of the values.""" + sin_sum = sum(math.sin(x * DEG_TO_RAD) * weight for x, weight in values) + cos_sum = sum(math.cos(x * DEG_TO_RAD) * weight for x, weight in values) + return (RAD_TO_DEG * math.atan2(sin_sum, cos_sum)) % 360 + + +def circular_mean(values: list[float]) -> float: + """Return the circular mean of the values.""" + sin_sum = sum(math.sin(x * DEG_TO_RAD) for x in values) + cos_sum = sum(math.cos(x * DEG_TO_RAD) for x in values) + return (RAD_TO_DEG * math.atan2(sin_sum, cos_sum)) % 360 + + _LOGGER = logging.getLogger(__name__) @@ -375,11 +446,19 @@ def _compile_hourly_statistics_summary_mean_stmt( start_time_ts: float, end_time_ts: float ) -> StatementLambdaElement: """Generate the summary mean statement for hourly statistics.""" + # Due to the fact that we support different mean types (see StatisticMeanType), + # we need to join here with the StatisticsMeta table to get the mean type + # and then use a case statement to compute the mean based on the mean type. + # As we use the StatisticsMeta.mean_type in the select case statement, we need + # to group by it as well.
return lambda_stmt( lambda: select(*QUERY_STATISTICS_SUMMARY_MEAN) .filter(StatisticsShortTerm.start_ts >= start_time_ts) .filter(StatisticsShortTerm.start_ts < end_time_ts) - .group_by(StatisticsShortTerm.metadata_id) + .join( + StatisticsMeta, and_(StatisticsShortTerm.metadata_id == StatisticsMeta.id) + ) + .group_by(StatisticsShortTerm.metadata_id, StatisticsMeta.mean_type) .order_by(StatisticsShortTerm.metadata_id) ) @@ -421,10 +500,17 @@ def _compile_hourly_statistics(session: Session, start: datetime) -> None: if stats: for stat in stats: - metadata_id, _mean, _min, _max = stat + metadata_id, _min, _max, _mean, _mean_weight, _mean_type = stat + if ( + try_parse_enum(StatisticMeanType, _mean_type) + is StatisticMeanType.CIRCULAR + ): + # Normalize the circular mean to be in the range [0, 360) + _mean = _mean % 360 summary[metadata_id] = { "start_ts": start_time_ts, "mean": _mean, + "mean_weight": _mean_weight, "min": _min, "max": _max, } @@ -830,7 +916,7 @@ def _statistic_by_id_from_metadata( "display_unit_of_measurement": get_display_unit( hass, meta["statistic_id"], meta["unit_of_measurement"] ), - "has_mean": meta["has_mean"], + "mean_type": meta["mean_type"], "has_sum": meta["has_sum"], "name": meta["name"], "source": meta["source"], @@ -849,7 +935,9 @@ def _flatten_list_statistic_ids_metadata_result( { "statistic_id": _id, "display_unit_of_measurement": info["display_unit_of_measurement"], - "has_mean": info["has_mean"], + "has_mean": info["mean_type"] + == StatisticMeanType.ARITHMETIC, # Can be removed with 2026.4 + "mean_type": info["mean_type"], "has_sum": info["has_sum"], "name": info.get("name"), "source": info["source"], @@ -904,7 +992,7 @@ def list_statistic_ids( continue result[key] = { "display_unit_of_measurement": meta["unit_of_measurement"], - "has_mean": meta["has_mean"], + "mean_type": meta["mean_type"], "has_sum": meta["has_sum"], "name": meta["name"], "source": meta["source"], @@ -922,6 +1010,7 @@ def _reduce_statistics( period_start_end: Callable[[float], tuple[float, float]], period: timedelta, types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]], + metadata: dict[str, tuple[int, StatisticMetaData]], ) -> dict[str, list[StatisticsRow]]: """Reduce hourly statistics to daily or monthly statistics.""" result: dict[str, list[StatisticsRow]] = defaultdict(list) @@ -949,7 +1038,13 @@ def _reduce_statistics( "end": end, } if _want_mean: - row["mean"] = mean(mean_values) if mean_values else None + row["mean"] = None + if mean_values: + match metadata[statistic_id][1]["mean_type"]: + case StatisticMeanType.ARITHMETIC: + row["mean"] = mean(mean_values) + case StatisticMeanType.CIRCULAR: + row["mean"] = circular_mean(mean_values) mean_values.clear() if _want_min: row["min"] = min(min_values) if min_values else None @@ -966,8 +1061,9 @@ def _reduce_statistics( result[statistic_id].append(row) if _want_max and (_max := statistic.get("max")) is not None: max_values.append(_max) - if _want_mean and (_mean := statistic.get("mean")) is not None: - mean_values.append(_mean) + if _want_mean: + if (_mean := statistic.get("mean")) is not None: + mean_values.append(_mean) if _want_min and (_min := statistic.get("min")) is not None: min_values.append(_min) prev_stat = statistic @@ -1014,11 +1110,12 @@ def reduce_day_ts_factory() -> tuple[ def _reduce_statistics_per_day( stats: dict[str, list[StatisticsRow]], types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]], + metadata: dict[str, tuple[int, StatisticMetaData]], ) -> dict[str, 
list[StatisticsRow]]: """Reduce hourly statistics to daily statistics.""" _same_day_ts, _day_start_end_ts = reduce_day_ts_factory() return _reduce_statistics( - stats, _same_day_ts, _day_start_end_ts, timedelta(days=1), types + stats, _same_day_ts, _day_start_end_ts, timedelta(days=1), types, metadata ) @@ -1062,11 +1159,12 @@ def reduce_week_ts_factory() -> tuple[ def _reduce_statistics_per_week( stats: dict[str, list[StatisticsRow]], types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]], + metadata: dict[str, tuple[int, StatisticMetaData]], ) -> dict[str, list[StatisticsRow]]: """Reduce hourly statistics to weekly statistics.""" _same_week_ts, _week_start_end_ts = reduce_week_ts_factory() return _reduce_statistics( - stats, _same_week_ts, _week_start_end_ts, timedelta(days=7), types + stats, _same_week_ts, _week_start_end_ts, timedelta(days=7), types, metadata ) @@ -1115,11 +1213,12 @@ def reduce_month_ts_factory() -> tuple[ def _reduce_statistics_per_month( stats: dict[str, list[StatisticsRow]], types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]], + metadata: dict[str, tuple[int, StatisticMetaData]], ) -> dict[str, list[StatisticsRow]]: """Reduce hourly statistics to monthly statistics.""" _same_month_ts, _month_start_end_ts = reduce_month_ts_factory() return _reduce_statistics( - stats, _same_month_ts, _month_start_end_ts, timedelta(days=31), types + stats, _same_month_ts, _month_start_end_ts, timedelta(days=31), types, metadata ) @@ -1163,27 +1262,41 @@ def _generate_max_mean_min_statistic_in_sub_period_stmt( return stmt +class _MaxMinMeanStatisticSubPeriod(TypedDict, total=False): + max: float + mean_acc: float + min: float + duration: float + circular_means: Required[list[tuple[float, float]]] + + def _get_max_mean_min_statistic_in_sub_period( session: Session, - result: dict[str, float], + result: _MaxMinMeanStatisticSubPeriod, start_time: datetime | None, end_time: datetime | None, table: type[StatisticsBase], types: set[Literal["max", "mean", "min", "change"]], - metadata_id: int, + metadata: tuple[int, StatisticMetaData], ) -> None: """Return max, mean and min during the period.""" # Calculate max, mean, min + mean_type = metadata[1]["mean_type"] columns = select() if "max" in types: columns = columns.add_columns(func.max(table.max)) if "mean" in types: - columns = columns.add_columns(func.avg(table.mean)) - columns = columns.add_columns(func.count(table.mean)) + match mean_type: + case StatisticMeanType.ARITHMETIC: + columns = columns.add_columns(func.avg(table.mean)) + columns = columns.add_columns(func.count(table.mean)) + case StatisticMeanType.CIRCULAR: + columns = columns.add_columns(*query_circular_mean(table)) if "min" in types: columns = columns.add_columns(func.min(table.min)) + stmt = _generate_max_mean_min_statistic_in_sub_period_stmt( - columns, start_time, end_time, table, metadata_id + columns, start_time, end_time, table, metadata[0] ) stats = cast(Sequence[Row[Any]], execute_stmt_lambda_element(session, stmt)) if not stats: @@ -1191,11 +1304,21 @@ def _get_max_mean_min_statistic_in_sub_period( if "max" in types and (new_max := stats[0].max) is not None: old_max = result.get("max") result["max"] = max(new_max, old_max) if old_max is not None else new_max - if "mean" in types and stats[0].avg is not None: + if "mean" in types: # https://github.com/sqlalchemy/sqlalchemy/issues/9127 - duration = stats[0].count * table.duration.total_seconds() # type: ignore[operator] - result["duration"] = result.get("duration", 0.0) + duration 
- result["mean_acc"] = result.get("mean_acc", 0.0) + stats[0].avg * duration + match mean_type: + case StatisticMeanType.ARITHMETIC: + duration = stats[0].count * table.duration.total_seconds() # type: ignore[operator] + if stats[0].avg is not None: + result["duration"] = result.get("duration", 0.0) + duration + result["mean_acc"] = ( + result.get("mean_acc", 0.0) + stats[0].avg * duration + ) + case StatisticMeanType.CIRCULAR: + if (new_circular_mean := stats[0].mean) is not None and ( + weight := stats[0].mean_weight + ) is not None: + result["circular_means"].append((new_circular_mean, weight)) if "min" in types and (new_min := stats[0].min) is not None: old_min = result.get("min") result["min"] = min(new_min, old_min) if old_min is not None else new_min @@ -1210,15 +1333,15 @@ def _get_max_mean_min_statistic( tail_start_time: datetime | None, tail_end_time: datetime | None, tail_only: bool, - metadata_id: int, + metadata: tuple[int, StatisticMetaData], types: set[Literal["max", "mean", "min", "change"]], ) -> dict[str, float | None]: """Return max, mean and min during the period. - The mean is a time weighted average, combining hourly and 5-minute statistics if + The mean is time weighted, combining hourly and 5-minute statistics if necessary. """ - max_mean_min: dict[str, float] = {} + max_mean_min = _MaxMinMeanStatisticSubPeriod(circular_means=[]) result: dict[str, float | None] = {} if tail_start_time is not None: @@ -1230,7 +1353,7 @@ def _get_max_mean_min_statistic( tail_end_time, StatisticsShortTerm, types, - metadata_id, + metadata, ) if not tail_only: @@ -1241,7 +1364,7 @@ def _get_max_mean_min_statistic( main_end_time, Statistics, types, - metadata_id, + metadata, ) if head_start_time is not None: @@ -1252,16 +1375,23 @@ def _get_max_mean_min_statistic( head_end_time, StatisticsShortTerm, types, - metadata_id, + metadata, ) if "max" in types: result["max"] = max_mean_min.get("max") if "mean" in types: - if "mean_acc" not in max_mean_min: - result["mean"] = None - else: - result["mean"] = max_mean_min["mean_acc"] / max_mean_min["duration"] + mean_value = None + match metadata[1]["mean_type"]: + case StatisticMeanType.CIRCULAR: + if circular_means := max_mean_min["circular_means"]: + mean_value = weighted_circular_mean(circular_means) + case StatisticMeanType.ARITHMETIC: + if (mean_value := max_mean_min.get("mean_acc")) is not None and ( + duration := max_mean_min.get("duration") + ) is not None: + mean_value = mean_value / duration + result["mean"] = mean_value if "min" in types: result["min"] = max_mean_min.get("min") return result @@ -1562,7 +1692,7 @@ def statistic_during_period( tail_start_time, tail_end_time, tail_only, - metadata_id, + metadata, types, ) @@ -1645,7 +1775,7 @@ def _extract_metadata_and_discard_impossible_columns( has_sum = False for metadata_id, stats_metadata in metadata.values(): metadata_ids.append(metadata_id) - has_mean |= stats_metadata["has_mean"] + has_mean |= stats_metadata["mean_type"] is not StatisticMeanType.NONE has_sum |= stats_metadata["has_sum"] if not has_mean: types.discard("mean") @@ -1801,13 +1931,13 @@ def _statistics_during_period_with_session( ) if period == "day": - result = _reduce_statistics_per_day(result, types) + result = _reduce_statistics_per_day(result, types, metadata) if period == "week": - result = _reduce_statistics_per_week(result, types) + result = _reduce_statistics_per_week(result, types, metadata) if period == "month": - result = _reduce_statistics_per_month(result, types) + result = 
_reduce_statistics_per_month(result, types, metadata) if "change" in _types: _augment_result_with_change( diff --git a/homeassistant/components/recorder/strings.json b/homeassistant/components/recorder/strings.json index 43c2ecdc14f..0c8d47548bf 100644 --- a/homeassistant/components/recorder/strings.json +++ b/homeassistant/components/recorder/strings.json @@ -43,15 +43,15 @@ "fields": { "entity_id": { "name": "Entities to remove", - "description": "List of entities for which the data is to be removed from the recorder database." + "description": "List of entities for which the data is to be removed from the Recorder database." }, "domains": { "name": "Domains to remove", - "description": "List of domains for which the data needs to be removed from the recorder database." + "description": "List of domains for which the data needs to be removed from the Recorder database." }, "entity_globs": { "name": "Entity globs to remove", - "description": "List of glob patterns used to select the entities for which the data is to be removed from the recorder database." + "description": "List of glob patterns used to select the entities for which the data is to be removed from the Recorder database." }, "keep_days": { "name": "[%key:component::recorder::services::purge::fields::keep_days::name%]", diff --git a/homeassistant/components/recorder/table_managers/statistics_meta.py b/homeassistant/components/recorder/table_managers/statistics_meta.py index 77fc34518db..634e9565c12 100644 --- a/homeassistant/components/recorder/table_managers/statistics_meta.py +++ b/homeassistant/components/recorder/table_managers/statistics_meta.py @@ -4,16 +4,18 @@ from __future__ import annotations import logging import threading -from typing import TYPE_CHECKING, Final, Literal +from typing import TYPE_CHECKING, Any, Final, Literal from lru import LRU from sqlalchemy import lambda_stmt, select +from sqlalchemy.orm import InstrumentedAttribute from sqlalchemy.orm.session import Session from sqlalchemy.sql.expression import true from sqlalchemy.sql.lambdas import StatementLambdaElement +from ..const import CIRCULAR_MEAN_SCHEMA_VERSION from ..db_schema import StatisticsMeta -from ..models import StatisticMetaData +from ..models import StatisticMeanType, StatisticMetaData from ..util import execute_stmt_lambda_element if TYPE_CHECKING: @@ -28,7 +30,6 @@ QUERY_STATISTIC_META = ( StatisticsMeta.statistic_id, StatisticsMeta.source, StatisticsMeta.unit_of_measurement, - StatisticsMeta.has_mean, StatisticsMeta.has_sum, StatisticsMeta.name, ) @@ -37,24 +38,38 @@ INDEX_ID: Final = 0 INDEX_STATISTIC_ID: Final = 1 INDEX_SOURCE: Final = 2 INDEX_UNIT_OF_MEASUREMENT: Final = 3 -INDEX_HAS_MEAN: Final = 4 -INDEX_HAS_SUM: Final = 5 -INDEX_NAME: Final = 6 +INDEX_HAS_SUM: Final = 4 +INDEX_NAME: Final = 5 +INDEX_MEAN_TYPE: Final = 6 def _generate_get_metadata_stmt( statistic_ids: set[str] | None = None, statistic_type: Literal["mean", "sum"] | None = None, statistic_source: str | None = None, + schema_version: int = 0, ) -> StatementLambdaElement: - """Generate a statement to fetch metadata.""" - stmt = lambda_stmt(lambda: select(*QUERY_STATISTIC_META)) + """Generate a statement to fetch metadata with the passed filters. + + Depending on the schema version, either mean_type (added in version 49) or has_mean column is used. 
+ """ + columns: list[InstrumentedAttribute[Any]] = list(QUERY_STATISTIC_META) + if schema_version >= CIRCULAR_MEAN_SCHEMA_VERSION: + columns.append(StatisticsMeta.mean_type) + else: + columns.append(StatisticsMeta.has_mean) + stmt = lambda_stmt(lambda: select(*columns)) if statistic_ids: stmt += lambda q: q.where(StatisticsMeta.statistic_id.in_(statistic_ids)) if statistic_source is not None: stmt += lambda q: q.where(StatisticsMeta.source == statistic_source) if statistic_type == "mean": - stmt += lambda q: q.where(StatisticsMeta.has_mean == true()) + if schema_version >= CIRCULAR_MEAN_SCHEMA_VERSION: + stmt += lambda q: q.where( + StatisticsMeta.mean_type != StatisticMeanType.NONE + ) + else: + stmt += lambda q: q.where(StatisticsMeta.has_mean == true()) elif statistic_type == "sum": stmt += lambda q: q.where(StatisticsMeta.has_sum == true()) return stmt @@ -100,14 +115,34 @@ class StatisticsMetaManager: for row in execute_stmt_lambda_element( session, _generate_get_metadata_stmt( - statistic_ids, statistic_type, statistic_source + statistic_ids, + statistic_type, + statistic_source, + self.recorder.schema_version, ), orm_rows=False, ): statistic_id = row[INDEX_STATISTIC_ID] row_id = row[INDEX_ID] + if self.recorder.schema_version >= CIRCULAR_MEAN_SCHEMA_VERSION: + try: + mean_type = StatisticMeanType(row[INDEX_MEAN_TYPE]) + except ValueError: + _LOGGER.warning( + "Invalid mean type found for statistic_id: %s, mean_type: %s. Skipping", + statistic_id, + row[INDEX_MEAN_TYPE], + ) + continue + else: + mean_type = ( + StatisticMeanType.ARITHMETIC + if row[INDEX_MEAN_TYPE] + else StatisticMeanType.NONE + ) meta = { - "has_mean": row[INDEX_HAS_MEAN], + "has_mean": mean_type is StatisticMeanType.ARITHMETIC, + "mean_type": mean_type, "has_sum": row[INDEX_HAS_SUM], "name": row[INDEX_NAME], "source": row[INDEX_SOURCE], @@ -157,9 +192,18 @@ class StatisticsMetaManager: This call is not thread-safe and must be called from the recorder thread. """ + if "mean_type" not in new_metadata: + # To maintain backward compatibility after adding 'mean_type' in schema version 49, + # we must still check for its presence. Even though type hints suggest it should always exist, + # custom integrations might omit it, so we need to guard against that. 
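+            # Map the legacy has_mean flag onto the new mean_type enum.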
+ new_metadata["mean_type"] = ( # type: ignore[unreachable] + StatisticMeanType.ARITHMETIC + if new_metadata["has_mean"] + else StatisticMeanType.NONE + ) metadata_id, old_metadata = old_metadata_dict[statistic_id] if not ( - old_metadata["has_mean"] != new_metadata["has_mean"] + old_metadata["mean_type"] != new_metadata["mean_type"] or old_metadata["has_sum"] != new_metadata["has_sum"] or old_metadata["name"] != new_metadata["name"] or old_metadata["unit_of_measurement"] @@ -170,7 +214,7 @@ class StatisticsMetaManager: self._assert_in_recorder_thread() session.query(StatisticsMeta).filter_by(statistic_id=statistic_id).update( { - StatisticsMeta.has_mean: new_metadata["has_mean"], + StatisticsMeta.mean_type: new_metadata["mean_type"], StatisticsMeta.has_sum: new_metadata["has_sum"], StatisticsMeta.name: new_metadata["name"], StatisticsMeta.unit_of_measurement: new_metadata["unit_of_measurement"], diff --git a/homeassistant/components/recorder/websocket_api.py b/homeassistant/components/recorder/websocket_api.py index d23ecab3dac..f4058943971 100644 --- a/homeassistant/components/recorder/websocket_api.py +++ b/homeassistant/components/recorder/websocket_api.py @@ -37,7 +37,7 @@ from homeassistant.util.unit_conversion import ( VolumeFlowRateConverter, ) -from .models import StatisticPeriod +from .models import StatisticMeanType, StatisticPeriod from .statistics import ( STATISTIC_UNIT_TO_UNIT_CONVERTER, async_add_external_statistics, @@ -532,6 +532,10 @@ def ws_import_statistics( ) -> None: """Import statistics.""" metadata = msg["metadata"] + # The WS command will be changed in a follow up PR + metadata["mean_type"] = ( + StatisticMeanType.ARITHMETIC if metadata["has_mean"] else StatisticMeanType.NONE + ) stats = msg["stats"] if valid_entity_id(metadata["statistic_id"]): diff --git a/homeassistant/components/remote_calendar/__init__.py b/homeassistant/components/remote_calendar/__init__.py new file mode 100644 index 00000000000..910eeae8268 --- /dev/null +++ b/homeassistant/components/remote_calendar/__init__.py @@ -0,0 +1,33 @@ +"""The Remote Calendar integration.""" + +import logging + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from .const import DOMAIN +from .coordinator import RemoteCalendarConfigEntry, RemoteCalendarDataUpdateCoordinator + +_LOGGER = logging.getLogger(__name__) + + +PLATFORMS: list[Platform] = [Platform.CALENDAR] + + +async def async_setup_entry( + hass: HomeAssistant, entry: RemoteCalendarConfigEntry +) -> bool: + """Set up Remote Calendar from a config entry.""" + hass.data.setdefault(DOMAIN, {}) + coordinator = RemoteCalendarDataUpdateCoordinator(hass, entry) + await coordinator.async_config_entry_first_refresh() + entry.runtime_data = coordinator + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + return True + + +async def async_unload_entry( + hass: HomeAssistant, entry: RemoteCalendarConfigEntry +) -> bool: + """Handle unload of an entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/remote_calendar/calendar.py b/homeassistant/components/remote_calendar/calendar.py new file mode 100644 index 00000000000..bd83a5f18cc --- /dev/null +++ b/homeassistant/components/remote_calendar/calendar.py @@ -0,0 +1,92 @@ +"""Calendar platform for a Remote Calendar.""" + +from datetime import datetime +import logging + +from ical.event import Event + +from homeassistant.components.calendar import CalendarEntity, CalendarEvent +from 
homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity +from homeassistant.util import dt as dt_util + +from . import RemoteCalendarConfigEntry +from .const import CONF_CALENDAR_NAME +from .coordinator import RemoteCalendarDataUpdateCoordinator + +_LOGGER = logging.getLogger(__name__) + +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + entry: RemoteCalendarConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up the remote calendar platform.""" + coordinator = entry.runtime_data + entity = RemoteCalendarEntity(coordinator, entry) + async_add_entities([entity]) + + +class RemoteCalendarEntity( + CoordinatorEntity[RemoteCalendarDataUpdateCoordinator], CalendarEntity +): + """A calendar entity backed by a remote iCalendar url.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: RemoteCalendarDataUpdateCoordinator, + entry: RemoteCalendarConfigEntry, + ) -> None: + """Initialize RemoteCalendarEntity.""" + super().__init__(coordinator) + self._attr_name = entry.data[CONF_CALENDAR_NAME] + self._attr_unique_id = entry.entry_id + + @property + def event(self) -> CalendarEvent | None: + """Return the next upcoming event.""" + now = dt_util.now() + events = self.coordinator.data.timeline_tz(now.tzinfo).active_after(now) + if event := next(events, None): + return _get_calendar_event(event) + return None + + async def async_get_events( + self, hass: HomeAssistant, start_date: datetime, end_date: datetime + ) -> list[CalendarEvent]: + """Get all events in a specific time frame.""" + events = self.coordinator.data.timeline_tz(start_date.tzinfo).overlapping( + start_date, + end_date, + ) + return [_get_calendar_event(event) for event in events] + + +def _get_calendar_event(event: Event) -> CalendarEvent: + """Return a CalendarEvent from an API event.""" + + return CalendarEvent( + summary=event.summary, + start=( + dt_util.as_local(event.start) + if isinstance(event.start, datetime) + else event.start + ), + end=( + dt_util.as_local(event.end) + if isinstance(event.end, datetime) + else event.end + ), + description=event.description, + uid=event.uid, + rrule=event.rrule.as_rrule_str() if event.rrule else None, + recurrence_id=event.recurrence_id, + location=event.location, + ) diff --git a/homeassistant/components/remote_calendar/config_flow.py b/homeassistant/components/remote_calendar/config_flow.py new file mode 100644 index 00000000000..03d0e7ea96a --- /dev/null +++ b/homeassistant/components/remote_calendar/config_flow.py @@ -0,0 +1,70 @@ +"""Config flow for Remote Calendar integration.""" + +import logging +from typing import Any + +from httpx import HTTPError, InvalidURL +from ical.calendar_stream import IcsCalendarStream +from ical.exceptions import CalendarParseError +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_URL +from homeassistant.helpers.httpx_client import get_async_client + +from .const import CONF_CALENDAR_NAME, DOMAIN + +_LOGGER = logging.getLogger(__name__) + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_CALENDAR_NAME): str, + vol.Required(CONF_URL): str, + } +) + + +class RemoteCalendarConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Remote Calendar.""" + + VERSION = 1 + + async def 
async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + if user_input is None: + return self.async_show_form( + step_id="user", data_schema=STEP_USER_DATA_SCHEMA + ) + errors: dict = {} + _LOGGER.debug("User input: %s", user_input) + self._async_abort_entries_match( + {CONF_CALENDAR_NAME: user_input[CONF_CALENDAR_NAME]} + ) + self._async_abort_entries_match({CONF_URL: user_input[CONF_URL]}) + client = get_async_client(self.hass) + try: + res = await client.get(user_input[CONF_URL], follow_redirects=True) + res.raise_for_status() + except (HTTPError, InvalidURL) as err: + errors["base"] = "cannot_connect" + _LOGGER.debug("An error occurred: %s", err) + else: + try: + await self.hass.async_add_executor_job( + IcsCalendarStream.calendar_from_ics, res.text + ) + except CalendarParseError as err: + errors["base"] = "invalid_ics_file" + _LOGGER.debug("Invalid .ics file: %s", err) + else: + return self.async_create_entry( + title=user_input[CONF_CALENDAR_NAME], data=user_input + ) + + return self.async_show_form( + step_id="user", + data_schema=STEP_USER_DATA_SCHEMA, + errors=errors, + ) diff --git a/homeassistant/components/remote_calendar/const.py b/homeassistant/components/remote_calendar/const.py new file mode 100644 index 00000000000..060d7633111 --- /dev/null +++ b/homeassistant/components/remote_calendar/const.py @@ -0,0 +1,4 @@ +"""Constants for the Remote Calendar integration.""" + +DOMAIN = "remote_calendar" +CONF_CALENDAR_NAME = "calendar_name" diff --git a/homeassistant/components/remote_calendar/coordinator.py b/homeassistant/components/remote_calendar/coordinator.py new file mode 100644 index 00000000000..6caec297c1a --- /dev/null +++ b/homeassistant/components/remote_calendar/coordinator.py @@ -0,0 +1,71 @@ +"""Data UpdateCoordinator for the Remote Calendar integration.""" + +from datetime import timedelta +import logging + +from httpx import HTTPError, InvalidURL +from ical.calendar import Calendar +from ical.calendar_stream import IcsCalendarStream +from ical.exceptions import CalendarParseError + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_URL +from homeassistant.core import HomeAssistant +from homeassistant.helpers.httpx_client import get_async_client +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DOMAIN + +type RemoteCalendarConfigEntry = ConfigEntry[RemoteCalendarDataUpdateCoordinator] + +_LOGGER = logging.getLogger(__name__) +SCAN_INTERVAL = timedelta(days=1) + + +class RemoteCalendarDataUpdateCoordinator(DataUpdateCoordinator[Calendar]): + """Class to manage fetching calendar data.""" + + config_entry: RemoteCalendarConfigEntry + ics: str + + def __init__( + self, + hass: HomeAssistant, + config_entry: RemoteCalendarConfigEntry, + ) -> None: + """Initialize data updater.""" + super().__init__( + hass, + _LOGGER, + name=DOMAIN, + update_interval=SCAN_INTERVAL, + always_update=True, + ) + self._client = get_async_client(hass) + self._url = config_entry.data[CONF_URL] + + async def _async_update_data(self) -> Calendar: + """Update data from the url.""" + try: + res = await self._client.get(self._url, follow_redirects=True) + res.raise_for_status() + except (HTTPError, InvalidURL) as err: + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="unable_to_fetch", + translation_placeholders={"err": str(err)}, + ) from err + try: + # calendar_from_ics will dynamically load packages + # 
the first time it is called, so we need to do it + in a separate thread to avoid blocking the event loop + self.ics = res.text + return await self.hass.async_add_executor_job( + IcsCalendarStream.calendar_from_ics, self.ics + ) + except CalendarParseError as err: + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="unable_to_parse", + translation_placeholders={"err": str(err)}, + ) from err diff --git a/homeassistant/components/remote_calendar/diagnostics.py b/homeassistant/components/remote_calendar/diagnostics.py new file mode 100644 index 00000000000..5ebfb3d3812 --- /dev/null +++ b/homeassistant/components/remote_calendar/diagnostics.py @@ -0,0 +1,25 @@ +"""Provides diagnostics for the remote calendar.""" + +import datetime +from typing import Any + +from ical.diagnostics import redact_ics + +from homeassistant.core import HomeAssistant +from homeassistant.util import dt as dt_util + +from . import RemoteCalendarConfigEntry + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: RemoteCalendarConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + coordinator = entry.runtime_data + payload: dict[str, Any] = { + "now": dt_util.now().isoformat(), + "timezone": str(dt_util.get_default_time_zone()), + "system_timezone": str(datetime.datetime.now().astimezone().tzinfo), + } + payload["ics"] = "\n".join(redact_ics(coordinator.ics)) + return payload diff --git a/homeassistant/components/remote_calendar/manifest.json b/homeassistant/components/remote_calendar/manifest.json new file mode 100644 index 00000000000..fe17a3d2c34 --- /dev/null +++ b/homeassistant/components/remote_calendar/manifest.json @@ -0,0 +1,12 @@ +{ + "domain": "remote_calendar", + "name": "Remote Calendar", + "codeowners": ["@Thomas55555"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/remote_calendar", + "integration_type": "service", + "iot_class": "cloud_polling", + "loggers": ["ical"], + "quality_scale": "silver", + "requirements": ["ical==9.0.1"] +} diff --git a/homeassistant/components/remote_calendar/quality_scale.yaml b/homeassistant/components/remote_calendar/quality_scale.yaml new file mode 100644 index 00000000000..964b63d7116 --- /dev/null +++ b/homeassistant/components/remote_calendar/quality_scale.yaml @@ -0,0 +1,98 @@ +rules: + # Bronze + config-flow: done + test-before-configure: done + unique-config-entry: + status: exempt + comment: | + No unique identifier. + config-flow-test-coverage: done + runtime-data: done + test-before-setup: done + appropriate-polling: done + entity-unique-id: done + has-entity-name: done + entity-event-setup: + status: exempt + comment: | + Entities of this integration do not explicitly subscribe to events. + dependency-transparency: done + action-setup: + status: exempt + comment: | + There are no actions. + common-modules: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + docs-actions: + status: exempt + comment: No actions available. + brands: done + # Silver + config-entry-unloading: done + log-when-unavailable: done + entity-unavailable: done + action-exceptions: + status: exempt + comment: | + There are no actions. + reauthentication-flow: + status: exempt + comment: | + There is no authentication required.
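For reference, a minimal standalone sketch of the fetch-and-parse pattern the new remote_calendar coordinator above uses: download the ICS text with httpx, then parse it off the event loop because IcsCalendarStream.calendar_from_ics loads its parser packages lazily on first use. This is an illustration only, not code from this PR; the URL and script structure are hypothetical.

# Hypothetical standalone sketch (not part of this PR) of the coordinator's pattern.
import asyncio
from itertools import islice

import httpx
from ical.calendar_stream import IcsCalendarStream

ICS_URL = "https://example.com/team.ics"  # hypothetical feed


async def main() -> None:
    # Fetch the raw ICS text, following redirects like the coordinator does.
    async with httpx.AsyncClient(follow_redirects=True) as client:
        res = await client.get(ICS_URL)
        res.raise_for_status()
    # Parse in a worker thread, mirroring hass.async_add_executor_job above.
    calendar = await asyncio.to_thread(IcsCalendarStream.calendar_from_ics, res.text)
    # Print the first few events from the calendar's timeline.
    for event in islice(calendar.timeline, 5):
        print(event.summary, event.start, event.end)


if __name__ == "__main__":
    asyncio.run(main())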
+ parallel-updates: done + test-coverage: done + integration-owner: done + docs-installation-parameters: done + docs-configuration-parameters: + status: exempt + comment: no configuration options + + # Gold + devices: + status: exempt + comment: No devices. One URL is always assigned to one calendar. + diagnostics: done + discovery-update-info: + status: todo + comment: No discovery protocol available. + discovery: + status: exempt + comment: No discovery protocol available. + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: exempt + comment: No devices. One URL is always assigned to one calendar. + entity-category: done + entity-device-class: + status: exempt + comment: No devices classes for calendars. + entity-disabled-by-default: + status: exempt + comment: Only one entity per entry. + entity-translations: + status: exempt + comment: Entity name is defined by the user, so no translation possible. + exception-translations: done + icon-translations: + status: exempt + comment: Only the default icon is used. + reconfiguration-flow: + status: exempt + comment: no configuration possible + repair-issues: todo + stale-devices: + status: exempt + comment: No devices. One URL is always assigned to one calendar. + + # Platinum + async-dependency: todo + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/remote_calendar/strings.json b/homeassistant/components/remote_calendar/strings.json new file mode 100644 index 00000000000..1ad62821818 --- /dev/null +++ b/homeassistant/components/remote_calendar/strings.json @@ -0,0 +1,33 @@ +{ + "title": "Remote Calendar", + "config": { + "step": { + "user": { + "description": "Please choose a name for the calendar to be imported", + "data": { + "calendar_name": "Calendar name", + "url": "Calendar URL" + }, + "data_description": { + "calendar_name": "The name of the calendar shown in the UI.", + "url": "The URL of the remote calendar." 
+ } + } + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_ics_file": "[%key:component::local_calendar::config::error::invalid_ics_file%]" + } + }, + "exceptions": { + "unable_to_fetch": { + "message": "Unable to fetch calendar data: {err}" + }, + "unable_to_parse": { + "message": "Unable to parse calendar data: {err}" + } + } +} diff --git a/homeassistant/components/remote_rpi_gpio/binary_sensor.py b/homeassistant/components/remote_rpi_gpio/binary_sensor.py index 42e8517c1e8..1d970bb3541 100644 --- a/homeassistant/components/remote_rpi_gpio/binary_sensor.py +++ b/homeassistant/components/remote_rpi_gpio/binary_sensor.py @@ -2,6 +2,7 @@ from __future__ import annotations +from gpiozero import DigitalInputDevice import requests import voluptuous as vol @@ -48,10 +49,10 @@ def setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Raspberry PI GPIO devices.""" - address = config["host"] + address = config[CONF_HOST] invert_logic = config[CONF_INVERT_LOGIC] pull_mode = config[CONF_PULL_MODE] - ports = config["ports"] + ports = config[CONF_PORTS] bouncetime = config[CONF_BOUNCETIME] / 1000 devices = [] @@ -71,9 +72,11 @@ class RemoteRPiGPIOBinarySensor(BinarySensorEntity): _attr_should_poll = False - def __init__(self, name, sensor, invert_logic): + def __init__( + self, name: str | None, sensor: DigitalInputDevice, invert_logic: bool + ) -> None: """Initialize the RPi binary sensor.""" - self._name = name + self._attr_name = name self._invert_logic = invert_logic self._state = False self._sensor = sensor @@ -90,20 +93,10 @@ class RemoteRPiGPIOBinarySensor(BinarySensorEntity): self._sensor.when_activated = read_gpio @property - def name(self): - """Return the name of the sensor.""" - return self._name - - @property - def is_on(self): + def is_on(self) -> bool: """Return the state of the entity.""" return self._state != self._invert_logic - @property - def device_class(self): - """Return the class of this sensor, from DEVICE_CLASSES.""" - return - def update(self) -> None: """Update the GPIO state.""" try: diff --git a/homeassistant/components/remote_rpi_gpio/switch.py b/homeassistant/components/remote_rpi_gpio/switch.py index 91b389c5a1e..25f95045e4b 100644 --- a/homeassistant/components/remote_rpi_gpio/switch.py +++ b/homeassistant/components/remote_rpi_gpio/switch.py @@ -4,6 +4,7 @@ from __future__ import annotations from typing import Any +from gpiozero import LED import voluptuous as vol from homeassistant.components.switch import ( @@ -57,37 +58,23 @@ def setup_platform( class RemoteRPiGPIOSwitch(SwitchEntity): """Representation of a Remote Raspberry Pi GPIO.""" + _attr_assumed_state = True _attr_should_poll = False - def __init__(self, name, led): + def __init__(self, name: str | None, led: LED) -> None: """Initialize the pin.""" - self._name = name or DEVICE_DEFAULT_NAME - self._state = False + self._attr_name = name or DEVICE_DEFAULT_NAME + self._attr_is_on = False self._switch = led - @property - def name(self): - """Return the name of the switch.""" - return self._name - - @property - def assumed_state(self): - """If unable to access real state of the entity.""" - return True - - @property - def is_on(self): - """Return true if device is on.""" - return self._state - def turn_on(self, **kwargs: Any) -> None: """Turn the device on.""" write_output(self._switch, 1) - self._state = True + 
self._attr_is_on = True self.schedule_update_ha_state() def turn_off(self, **kwargs: Any) -> None: """Turn the device off.""" write_output(self._switch, 0) - self._state = False + self._attr_is_on = False self.schedule_update_ha_state() diff --git a/homeassistant/components/renault/config_flow.py b/homeassistant/components/renault/config_flow.py index 70544a5637f..90d2c11613c 100644 --- a/homeassistant/components/renault/config_flow.py +++ b/homeassistant/components/renault/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Mapping +import logging from typing import Any import aiohttp @@ -16,6 +17,8 @@ from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from .const import CONF_KAMEREON_ACCOUNT_ID, CONF_LOCALE, DOMAIN from .renault_hub import RenaultHub +_LOGGER = logging.getLogger(__name__) + USER_SCHEMA = vol.Schema( { vol.Required(CONF_LOCALE): vol.In(AVAILABLE_LOCALES.keys()), @@ -54,7 +57,8 @@ class RenaultFlowHandler(ConfigFlow, domain=DOMAIN): ) except (aiohttp.ClientConnectionError, GigyaException): errors["base"] = "cannot_connect" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: if login_success: diff --git a/homeassistant/components/reolink/__init__.py b/homeassistant/components/reolink/__init__.py index 71ca5428740..99ca91c5bdf 100644 --- a/homeassistant/components/reolink/__init__.py +++ b/homeassistant/components/reolink/__init__.py @@ -28,7 +28,7 @@ from homeassistant.helpers.event import async_call_later from homeassistant.helpers.typing import ConfigType from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import CONF_SUPPORTS_PRIVACY_MODE, CONF_USE_HTTPS, DOMAIN +from .const import CONF_BC_PORT, CONF_SUPPORTS_PRIVACY_MODE, CONF_USE_HTTPS, DOMAIN from .exceptions import PasswordIncompatible, ReolinkException, UserNotAdmin from .host import ReolinkHost from .services import async_setup_services @@ -67,9 +67,7 @@ async def async_setup_entry( hass: HomeAssistant, config_entry: ReolinkConfigEntry ) -> bool: """Set up Reolink from a config entry.""" - host = ReolinkHost( - hass, config_entry.data, config_entry.options, config_entry.entry_id - ) + host = ReolinkHost(hass, config_entry.data, config_entry.options, config_entry) try: await host.async_init() @@ -100,6 +98,7 @@ async def async_setup_entry( or host.api.use_https != config_entry.data[CONF_USE_HTTPS] or host.api.supported(None, "privacy_mode") != config_entry.data.get(CONF_SUPPORTS_PRIVACY_MODE) + or host.api.baichuan.port != config_entry.data.get(CONF_BC_PORT) ): if host.api.port != config_entry.data[CONF_PORT]: _LOGGER.warning( @@ -108,10 +107,21 @@ async def async_setup_entry( config_entry.data[CONF_PORT], host.api.port, ) + if ( + config_entry.data.get(CONF_BC_PORT, host.api.baichuan.port) + != host.api.baichuan.port + ): + _LOGGER.warning( + "Baichuan port of Reolink %s, changed from %s to %s", + host.api.nvr_name, + config_entry.data.get(CONF_BC_PORT), + host.api.baichuan.port, + ) data = { **config_entry.data, CONF_PORT: host.api.port, CONF_USE_HTTPS: host.api.use_https, + CONF_BC_PORT: host.api.baichuan.port, CONF_SUPPORTS_PRIVACY_MODE: host.api.supported(None, "privacy_mode"), } hass.config_entries.async_update_entry(config_entry, data=data) diff --git a/homeassistant/components/reolink/binary_sensor.py b/homeassistant/components/reolink/binary_sensor.py index 4e90bfc9eef..39910bbc52a 100644 --- 
a/homeassistant/components/reolink/binary_sensor.py +++ b/homeassistant/components/reolink/binary_sensor.py @@ -25,7 +25,11 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback -from .entity import ReolinkChannelCoordinatorEntity, ReolinkChannelEntityDescription +from .entity import ( + ReolinkChannelCoordinatorEntity, + ReolinkChannelEntityDescription, + ReolinkEntityDescription, +) from .util import ReolinkConfigEntry, ReolinkData PARALLEL_UPDATES = 0 @@ -41,6 +45,18 @@ class ReolinkBinarySensorEntityDescription( value: Callable[[Host, int], bool] +@dataclass(frozen=True, kw_only=True) +class ReolinkSmartAIBinarySensorEntityDescription( + BinarySensorEntityDescription, + ReolinkEntityDescription, +): + """A class that describes Smart AI binary sensor entities.""" + + smart_type: str + value: Callable[[Host, int, int], bool] + supported: Callable[[Host, int, int], bool] = lambda api, ch, loc: True + + BINARY_PUSH_SENSORS = ( ReolinkBinarySensorEntityDescription( key="motion", @@ -121,6 +137,142 @@ BINARY_SENSORS = ( ), ) +BINARY_SMART_AI_SENSORS = ( + ReolinkSmartAIBinarySensorEntityDescription( + key="crossline_person", + smart_type="crossline", + cmd_id=33, + translation_key="crossline_person", + value=lambda api, ch, loc: ( + api.baichuan.smart_ai_state(ch, "crossline", loc, "people") + ), + supported=lambda api, ch, loc: ( + api.supported(ch, "ai_crossline") + and "people" in api.baichuan.smart_ai_type_list(ch, "crossline", loc) + ), + ), + ReolinkSmartAIBinarySensorEntityDescription( + key="crossline_vehicle", + smart_type="crossline", + cmd_id=33, + translation_key="crossline_vehicle", + value=lambda api, ch, loc: ( + api.baichuan.smart_ai_state(ch, "crossline", loc, "vehicle") + ), + supported=lambda api, ch, loc: ( + api.supported(ch, "ai_crossline") + and "vehicle" in api.baichuan.smart_ai_type_list(ch, "crossline", loc) + ), + ), + ReolinkSmartAIBinarySensorEntityDescription( + key="crossline_dog_cat", + smart_type="crossline", + cmd_id=33, + translation_key="crossline_dog_cat", + value=lambda api, ch, loc: ( + api.baichuan.smart_ai_state(ch, "crossline", loc, "dog_cat") + ), + supported=lambda api, ch, loc: ( + api.supported(ch, "ai_crossline") + and "dog_cat" in api.baichuan.smart_ai_type_list(ch, "crossline", loc) + ), + ), + ReolinkSmartAIBinarySensorEntityDescription( + key="intrusion_person", + smart_type="intrusion", + cmd_id=33, + translation_key="intrusion_person", + value=lambda api, ch, loc: ( + api.baichuan.smart_ai_state(ch, "intrusion", loc, "people") + ), + supported=lambda api, ch, loc: ( + api.supported(ch, "ai_intrusion") + and "people" in api.baichuan.smart_ai_type_list(ch, "intrusion", loc) + ), + ), + ReolinkSmartAIBinarySensorEntityDescription( + key="intrusion_vehicle", + smart_type="intrusion", + cmd_id=33, + translation_key="intrusion_vehicle", + value=lambda api, ch, loc: ( + api.baichuan.smart_ai_state(ch, "intrusion", loc, "vehicle") + ), + supported=lambda api, ch, loc: ( + api.supported(ch, "ai_intrusion") + and "vehicle" in api.baichuan.smart_ai_type_list(ch, "intrusion", loc) + ), + ), + ReolinkSmartAIBinarySensorEntityDescription( + key="intrusion_dog_cat", + smart_type="intrusion", + cmd_id=33, + translation_key="intrusion_dog_cat", + value=lambda api, ch, loc: ( + api.baichuan.smart_ai_state(ch, "intrusion", loc, "dog_cat") + ), + supported=lambda api, ch, loc: ( + api.supported(ch, "ai_intrusion") + 
and "dog_cat" in api.baichuan.smart_ai_type_list(ch, "intrusion", loc) + ), + ), + ReolinkSmartAIBinarySensorEntityDescription( + key="linger_person", + smart_type="loitering", + cmd_id=33, + translation_key="linger_person", + value=lambda api, ch, loc: ( + api.baichuan.smart_ai_state(ch, "loitering", loc, "people") + ), + supported=lambda api, ch, loc: ( + api.supported(ch, "ai_linger") + and "people" in api.baichuan.smart_ai_type_list(ch, "loitering", loc) + ), + ), + ReolinkSmartAIBinarySensorEntityDescription( + key="linger_vehicle", + smart_type="loitering", + cmd_id=33, + translation_key="linger_vehicle", + value=lambda api, ch, loc: ( + api.baichuan.smart_ai_state(ch, "loitering", loc, "vehicle") + ), + supported=lambda api, ch, loc: ( + api.supported(ch, "ai_linger") + and "vehicle" in api.baichuan.smart_ai_type_list(ch, "loitering", loc) + ), + ), + ReolinkSmartAIBinarySensorEntityDescription( + key="linger_dog_cat", + smart_type="loitering", + cmd_id=33, + translation_key="linger_dog_cat", + value=lambda api, ch, loc: ( + api.baichuan.smart_ai_state(ch, "loitering", loc, "dog_cat") + ), + supported=lambda api, ch, loc: ( + api.supported(ch, "ai_linger") + and "dog_cat" in api.baichuan.smart_ai_type_list(ch, "loitering", loc) + ), + ), + ReolinkSmartAIBinarySensorEntityDescription( + key="forgotten_item", + smart_type="legacy", + cmd_id=33, + translation_key="forgotten_item", + value=lambda api, ch, loc: (api.baichuan.smart_ai_state(ch, "legacy", loc)), + supported=lambda api, ch, loc: api.supported(ch, "ai_forgotten_item"), + ), + ReolinkSmartAIBinarySensorEntityDescription( + key="taken_item", + smart_type="loss", + cmd_id=33, + translation_key="taken_item", + value=lambda api, ch, loc: (api.baichuan.smart_ai_state(ch, "loss", loc)), + supported=lambda api, ch, loc: api.supported(ch, "ai_taken_item"), + ), +) + async def async_setup_entry( hass: HomeAssistant, @@ -129,18 +281,29 @@ async def async_setup_entry( ) -> None: """Set up a Reolink IP Camera.""" reolink_data: ReolinkData = config_entry.runtime_data + api = reolink_data.host.api - entities: list[ReolinkBinarySensorEntity] = [] - for channel in reolink_data.host.api.channels: + entities: list[ReolinkBinarySensorEntity | ReolinkSmartAIBinarySensorEntity] = [] + for channel in api.channels: entities.extend( ReolinkPushBinarySensorEntity(reolink_data, channel, entity_description) for entity_description in BINARY_PUSH_SENSORS - if entity_description.supported(reolink_data.host.api, channel) + if entity_description.supported(api, channel) ) entities.extend( ReolinkBinarySensorEntity(reolink_data, channel, entity_description) for entity_description in BINARY_SENSORS - if entity_description.supported(reolink_data.host.api, channel) + if entity_description.supported(api, channel) + ) + entities.extend( + ReolinkSmartAIBinarySensorEntity( + reolink_data, channel, location, entity_description + ) + for entity_description in BINARY_SMART_AI_SENSORS + for location in api.baichuan.smart_location_list( + channel, entity_description.key + ) + if entity_description.supported(api, channel, location) ) async_add_entities(entities) @@ -198,3 +361,40 @@ class ReolinkPushBinarySensorEntity(ReolinkBinarySensorEntity): async def _async_handle_event(self, event: str) -> None: """Handle incoming event for motion detection.""" self.async_write_ha_state() + + +class ReolinkSmartAIBinarySensorEntity( + ReolinkChannelCoordinatorEntity, BinarySensorEntity +): + """Binary-sensor class for Reolink IP camera Smart AI sensors.""" + + entity_description: 
ReolinkSmartAIBinarySensorEntityDescription + + def __init__( + self, + reolink_data: ReolinkData, + channel: int, + location: int, + entity_description: ReolinkSmartAIBinarySensorEntityDescription, + ) -> None: + """Initialize Reolink binary sensor.""" + self.entity_description = entity_description + super().__init__(reolink_data, channel) + unique_index = self._host.api.baichuan.smart_ai_index( + channel, entity_description.smart_type, location + ) + self._attr_unique_id = f"{self._attr_unique_id}_{unique_index}" + + self._location = location + self._attr_translation_placeholders = { + "zone_name": self._host.api.baichuan.smart_ai_name( + channel, entity_description.smart_type, location + ) + } + + @property + def is_on(self) -> bool: + """State of the sensor.""" + return self.entity_description.value( + self._host.api, self._channel, self._location + ) diff --git a/homeassistant/components/reolink/config_flow.py b/homeassistant/components/reolink/config_flow.py index 7943cadef21..12ccd455be3 100644 --- a/homeassistant/components/reolink/config_flow.py +++ b/homeassistant/components/reolink/config_flow.py @@ -8,6 +8,7 @@ import logging from typing import Any from reolink_aio.api import ALLOWED_SPECIAL_CHARS +from reolink_aio.baichuan import DEFAULT_BC_PORT from reolink_aio.exceptions import ( ApiError, CredentialsInvalidError, @@ -37,7 +38,7 @@ from homeassistant.helpers import config_validation as cv, selector from homeassistant.helpers.device_registry import format_mac from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo -from .const import CONF_SUPPORTS_PRIVACY_MODE, CONF_USE_HTTPS, DOMAIN +from .const import CONF_BC_PORT, CONF_SUPPORTS_PRIVACY_MODE, CONF_USE_HTTPS, DOMAIN from .exceptions import ( PasswordIncompatible, ReolinkException, @@ -287,6 +288,7 @@ class ReolinkFlowHandler(ConfigFlow, domain=DOMAIN): if not errors: user_input[CONF_PORT] = host.api.port user_input[CONF_USE_HTTPS] = host.api.use_https + user_input[CONF_BC_PORT] = host.api.baichuan.port user_input[CONF_SUPPORTS_PRIVACY_MODE] = host.api.supported( None, "privacy_mode" ) @@ -326,8 +328,9 @@ class ReolinkFlowHandler(ConfigFlow, domain=DOMAIN): if errors: data_schema = data_schema.extend( { - vol.Optional(CONF_PORT): cv.positive_int, + vol.Optional(CONF_PORT): cv.port, vol.Required(CONF_USE_HTTPS, default=False): bool, + vol.Required(CONF_BC_PORT, default=DEFAULT_BC_PORT): cv.port, } ) diff --git a/homeassistant/components/reolink/const.py b/homeassistant/components/reolink/const.py index 7bd93337c46..026d1219881 100644 --- a/homeassistant/components/reolink/const.py +++ b/homeassistant/components/reolink/const.py @@ -3,4 +3,5 @@ DOMAIN = "reolink" CONF_USE_HTTPS = "use_https" +CONF_BC_PORT = "baichuan_port" CONF_SUPPORTS_PRIVACY_MODE = "privacy_mode_supported" diff --git a/homeassistant/components/reolink/entity.py b/homeassistant/components/reolink/entity.py index 55ce4ce891e..ec598de663d 100644 --- a/homeassistant/components/reolink/entity.py +++ b/homeassistant/components/reolink/entity.py @@ -178,8 +178,13 @@ class ReolinkChannelCoordinatorEntity(ReolinkHostCoordinatorEntity): else: self._dev_id = f"{self._host.unique_id}_ch{dev_ch}" + connections = set() + if mac := self._host.api.baichuan.mac_address(dev_ch): + connections.add((CONNECTION_NETWORK_MAC, mac)) + self._attr_device_info = DeviceInfo( identifiers={(DOMAIN, self._dev_id)}, + connections=connections, via_device=(DOMAIN, self._host.unique_id), name=self._host.api.camera_name(dev_ch), model=self._host.api.camera_model(dev_ch), diff --git 
a/homeassistant/components/reolink/host.py b/homeassistant/components/reolink/host.py index 2f646ba9090..a027177f1fc 100644 --- a/homeassistant/components/reolink/host.py +++ b/homeassistant/components/reolink/host.py @@ -12,6 +12,7 @@ from typing import Any, Literal import aiohttp from aiohttp.web import Request from reolink_aio.api import ALLOWED_SPECIAL_CHARS, Host +from reolink_aio.baichuan import DEFAULT_BC_PORT from reolink_aio.enums import SubType from reolink_aio.exceptions import NotSupportedError, ReolinkError, SubscriptionError @@ -33,14 +34,14 @@ from homeassistant.helpers.network import NoURLAvailableError, get_url from homeassistant.helpers.storage import Store from homeassistant.util.ssl import SSLCipherList -from .const import CONF_SUPPORTS_PRIVACY_MODE, CONF_USE_HTTPS, DOMAIN +from .const import CONF_BC_PORT, CONF_SUPPORTS_PRIVACY_MODE, CONF_USE_HTTPS, DOMAIN from .exceptions import ( PasswordIncompatible, ReolinkSetupException, ReolinkWebhookException, UserNotAdmin, ) -from .util import get_store +from .util import ReolinkConfigEntry, get_store DEFAULT_TIMEOUT = 30 FIRST_TCP_PUSH_TIMEOUT = 10 @@ -66,11 +67,11 @@ class ReolinkHost: hass: HomeAssistant, config: Mapping[str, Any], options: Mapping[str, Any], - config_entry_id: str | None = None, + config_entry: ReolinkConfigEntry | None = None, ) -> None: """Initialize Reolink Host. Could be either NVR, or Camera.""" self._hass: HomeAssistant = hass - self._config_entry_id = config_entry_id + self._config_entry = config_entry self._config = config self._unique_id: str = "" @@ -91,6 +92,7 @@ class ReolinkHost: protocol=options[CONF_PROTOCOL], timeout=DEFAULT_TIMEOUT, aiohttp_get_session_callback=get_aiohttp_session, + bc_port=config.get(CONF_BC_PORT, DEFAULT_BC_PORT), ) self.last_wake: float = 0 @@ -149,15 +151,33 @@ class ReolinkHost: async def async_init(self) -> None: """Connect to Reolink host.""" if not self._api.valid_password(): + if ( + len(self._config[CONF_PASSWORD]) >= 32 + and self._config_entry is not None + ): + ir.async_create_issue( + self._hass, + DOMAIN, + f"password_too_long_{self._config_entry.entry_id}", + is_fixable=True, + severity=ir.IssueSeverity.ERROR, + translation_key="password_too_long", + translation_placeholders={"name": self._config_entry.title}, + ) + raise PasswordIncompatible( - "Reolink password contains incompatible special character, " - "please change the password to only contain characters: " - f"a-z, A-Z, 0-9 or {ALLOWED_SPECIAL_CHARS}" + "Reolink password contains incompatible special character or " + "is too long, please change the password to only contain characters: " + f"a-z, A-Z, 0-9 or {ALLOWED_SPECIAL_CHARS} " + "and not be longer than 31 characters" ) store: Store[str] | None = None - if self._config_entry_id is not None: - store = get_store(self._hass, self._config_entry_id) + if self._config_entry is not None: + ir.async_delete_issue( + self._hass, DOMAIN, f"password_too_long_{self._config_entry.entry_id}" + ) + store = get_store(self._hass, self._config_entry.entry_id) if self._config.get(CONF_SUPPORTS_PRIVACY_MODE) and ( data := await store.async_load() ): diff --git a/homeassistant/components/reolink/icons.json b/homeassistant/components/reolink/icons.json index 26198a11594..7d1dba099ed 100644 --- a/homeassistant/components/reolink/icons.json +++ b/homeassistant/components/reolink/icons.json @@ -54,6 +54,72 @@ "state": { "on": "mdi:sleep" } + }, + "crossline_person": { + "default": "mdi:fence", + "state": { + "on": "mdi:fence-electric" + } + }, + "crossline_vehicle": { + 
"default": "mdi:fence", + "state": { + "on": "mdi:fence-electric" + } + }, + "crossline_dog_cat": { + "default": "mdi:fence", + "state": { + "on": "mdi:fence-electric" + } + }, + "intrusion_person": { + "default": "mdi:location-enter", + "state": { + "on": "mdi:alert-circle-outline" + } + }, + "intrusion_vehicle": { + "default": "mdi:location-enter", + "state": { + "on": "mdi:alert-circle-outline" + } + }, + "intrusion_dog_cat": { + "default": "mdi:location-enter", + "state": { + "on": "mdi:alert-circle-outline" + } + }, + "linger_person": { + "default": "mdi:account-switch", + "state": { + "on": "mdi:account-alert" + } + }, + "linger_vehicle": { + "default": "mdi:account-switch", + "state": { + "on": "mdi:account-alert" + } + }, + "linger_dog_cat": { + "default": "mdi:account-switch", + "state": { + "on": "mdi:account-alert" + } + }, + "forgotten_item": { + "default": "mdi:package-variant-closed-plus", + "state": { + "on": "mdi:package-variant-closed-check" + } + }, + "taken_item": { + "default": "mdi:package-variant-closed-minus", + "state": { + "on": "mdi:package-variant-closed-check" + } } }, "button": { @@ -151,6 +217,21 @@ "ai_animal_sensitivity": { "default": "mdi:paw" }, + "crossline_sensitivity": { + "default": "mdi:fence" + }, + "intrusion_sensitivity": { + "default": "mdi:location-enter" + }, + "linger_sensitivity": { + "default": "mdi:account-switch" + }, + "forgotten_item_sensitivity": { + "default": "mdi:package-variant-closed-plus" + }, + "taken_item_sensitivity": { + "default": "mdi:package-variant-closed-minus" + }, "ai_face_delay": { "default": "mdi:face-recognition" }, @@ -169,6 +250,18 @@ "ai_animal_delay": { "default": "mdi:paw" }, + "intrusion_delay": { + "default": "mdi:location-enter" + }, + "linger_delay": { + "default": "mdi:account-switch" + }, + "forgotten_item_delay": { + "default": "mdi:package-variant-closed-plus" + }, + "taken_item_delay": { + "default": "mdi:package-variant-closed-minus" + }, "auto_quick_reply_time": { "default": "mdi:message-reply-text-outline" }, @@ -284,6 +377,9 @@ }, "sub_bit_rate": { "default": "mdi:play-speed" + }, + "scene_mode": { + "default": "mdi:view-list" } }, "sensor": { @@ -299,6 +395,9 @@ "battery_state": { "default": "mdi:battery-charging" }, + "day_night_state": { + "default": "mdi:theme-light-dark" + }, "wifi_signal": { "default": "mdi:wifi" }, diff --git a/homeassistant/components/reolink/manifest.json b/homeassistant/components/reolink/manifest.json index f923efdbbf2..82b9586cccc 100644 --- a/homeassistant/components/reolink/manifest.json +++ b/homeassistant/components/reolink/manifest.json @@ -19,5 +19,5 @@ "iot_class": "local_push", "loggers": ["reolink_aio"], "quality_scale": "platinum", - "requirements": ["reolink-aio==0.12.1"] + "requirements": ["reolink-aio==0.13.0"] } diff --git a/homeassistant/components/reolink/number.py b/homeassistant/components/reolink/number.py index 48382df4cbc..2a6fb740ee0 100644 --- a/homeassistant/components/reolink/number.py +++ b/homeassistant/components/reolink/number.py @@ -9,6 +9,7 @@ from typing import Any from reolink_aio.api import Chime, Host from homeassistant.components.number import ( + NumberDeviceClass, NumberEntity, NumberEntityDescription, NumberMode, @@ -44,6 +45,19 @@ class ReolinkNumberEntityDescription( value: Callable[[Host, int], float | None] +@dataclass(frozen=True, kw_only=True) +class ReolinkSmartAINumberEntityDescription( + NumberEntityDescription, + ReolinkChannelEntityDescription, +): + """A class that describes smart AI number entities.""" + + smart_type: 
str + method: Callable[[Host, int, int, float], Any] + mode: NumberMode = NumberMode.AUTO + value: Callable[[Host, int, int], float | None] + + @dataclass(frozen=True, kw_only=True) class ReolinkHostNumberEntityDescription( NumberEntityDescription, @@ -125,6 +139,7 @@ NUMBER_ENTITIES = ( cmd_key="GetPtzGuard", translation_key="guard_return_time", entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, native_step=1, native_unit_of_measurement=UnitOfTime.SECONDS, native_min_value=10, @@ -248,6 +263,7 @@ NUMBER_ENTITIES = ( cmd_key="GetAiAlarm", translation_key="ai_face_delay", entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, entity_registry_enabled_default=False, native_step=1, native_unit_of_measurement=UnitOfTime.SECONDS, @@ -264,6 +280,7 @@ NUMBER_ENTITIES = ( cmd_key="GetAiAlarm", translation_key="ai_person_delay", entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, entity_registry_enabled_default=False, native_step=1, native_unit_of_measurement=UnitOfTime.SECONDS, @@ -280,6 +297,7 @@ NUMBER_ENTITIES = ( cmd_key="GetAiAlarm", translation_key="ai_vehicle_delay", entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, entity_registry_enabled_default=False, native_step=1, native_unit_of_measurement=UnitOfTime.SECONDS, @@ -296,6 +314,7 @@ NUMBER_ENTITIES = ( cmd_key="GetAiAlarm", translation_key="ai_package_delay", entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, entity_registry_enabled_default=False, native_step=1, native_unit_of_measurement=UnitOfTime.SECONDS, @@ -312,6 +331,7 @@ NUMBER_ENTITIES = ( cmd_key="GetAiAlarm", translation_key="ai_pet_delay", entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, entity_registry_enabled_default=False, native_step=1, native_unit_of_measurement=UnitOfTime.SECONDS, @@ -330,6 +350,7 @@ NUMBER_ENTITIES = ( cmd_key="GetAiAlarm", translation_key="ai_animal_delay", entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, entity_registry_enabled_default=False, native_step=1, native_unit_of_measurement=UnitOfTime.SECONDS, @@ -346,6 +367,7 @@ NUMBER_ENTITIES = ( cmd_key="GetAutoReply", translation_key="auto_quick_reply_time", entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, native_step=1, native_unit_of_measurement=UnitOfTime.SECONDS, native_min_value=1, @@ -385,6 +407,7 @@ NUMBER_ENTITIES = ( cmd_key="GetAiCfg", translation_key="auto_track_disappear_time", entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, native_step=1, native_unit_of_measurement=UnitOfTime.SECONDS, native_min_value=1, @@ -400,6 +423,7 @@ NUMBER_ENTITIES = ( cmd_key="GetAiCfg", translation_key="auto_track_stop_time", entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, native_step=1, native_unit_of_measurement=UnitOfTime.SECONDS, native_min_value=1, @@ -493,6 +517,168 @@ NUMBER_ENTITIES = ( ), ) +SMART_AI_NUMBER_ENTITIES = ( + ReolinkSmartAINumberEntityDescription( + key="crossline_sensitivity", + smart_type="crossline", + cmd_id=527, + translation_key="crossline_sensitivity", + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + native_step=1, + native_min_value=0, + native_max_value=100, + supported=lambda api, ch: api.supported(ch, "ai_crossline"), + value=lambda api, ch, loc: ( + api.baichuan.smart_ai_sensitivity(ch, "crossline", loc) + ), + method=lambda api, ch, 
loc, value: api.baichuan.set_smart_ai( + ch, "crossline", loc, sensitivity=int(value) + ), + ), + ReolinkSmartAINumberEntityDescription( + key="intrusion_sensitivity", + smart_type="intrusion", + cmd_id=529, + translation_key="intrusion_sensitivity", + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + native_step=1, + native_min_value=0, + native_max_value=100, + supported=lambda api, ch: api.supported(ch, "ai_intrusion"), + value=lambda api, ch, loc: ( + api.baichuan.smart_ai_sensitivity(ch, "intrusion", loc) + ), + method=lambda api, ch, loc, value: api.baichuan.set_smart_ai( + ch, "intrusion", loc, sensitivity=int(value) + ), + ), + ReolinkSmartAINumberEntityDescription( + key="linger_sensitivity", + smart_type="loitering", + cmd_id=531, + translation_key="linger_sensitivity", + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + native_step=1, + native_min_value=0, + native_max_value=100, + supported=lambda api, ch: api.supported(ch, "ai_linger"), + value=lambda api, ch, loc: ( + api.baichuan.smart_ai_sensitivity(ch, "loitering", loc) + ), + method=lambda api, ch, loc, value: api.baichuan.set_smart_ai( + ch, "loitering", loc, sensitivity=int(value) + ), + ), + ReolinkSmartAINumberEntityDescription( + key="forgotten_item_sensitivity", + smart_type="legacy", + cmd_id=549, + translation_key="forgotten_item_sensitivity", + entity_registry_enabled_default=False, + entity_category=EntityCategory.CONFIG, + native_step=1, + native_min_value=0, + native_max_value=100, + supported=lambda api, ch: api.supported(ch, "ai_forgotten_item"), + value=lambda api, ch, loc: ( + api.baichuan.smart_ai_sensitivity(ch, "legacy", loc) + ), + method=lambda api, ch, loc, value: api.baichuan.set_smart_ai( + ch, "legacy", loc, sensitivity=int(value) + ), + ), + ReolinkSmartAINumberEntityDescription( + key="taken_item_sensitivity", + smart_type="loss", + cmd_id=551, + translation_key="taken_item_sensitivity", + entity_registry_enabled_default=False, + entity_category=EntityCategory.CONFIG, + native_step=1, + native_min_value=0, + native_max_value=100, + supported=lambda api, ch: api.supported(ch, "ai_taken_item"), + value=lambda api, ch, loc: api.baichuan.smart_ai_sensitivity(ch, "loss", loc), + method=lambda api, ch, loc, value: api.baichuan.set_smart_ai( + ch, "loss", loc, sensitivity=int(value) + ), + ), + ReolinkSmartAINumberEntityDescription( + key="intrusion_delay", + smart_type="intrusion", + cmd_id=529, + translation_key="intrusion_delay", + entity_registry_enabled_default=False, + entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, + native_step=1, + native_unit_of_measurement=UnitOfTime.SECONDS, + native_min_value=0, + native_max_value=10, + supported=lambda api, ch: api.supported(ch, "ai_intrusion"), + value=lambda api, ch, loc: api.baichuan.smart_ai_delay(ch, "intrusion", loc), + method=lambda api, ch, loc, value: api.baichuan.set_smart_ai( + ch, "intrusion", loc, delay=int(value) + ), + ), + ReolinkSmartAINumberEntityDescription( + key="linger_delay", + smart_type="loitering", + cmd_id=531, + translation_key="linger_delay", + entity_registry_enabled_default=False, + entity_category=EntityCategory.CONFIG, + native_step=1, + native_unit_of_measurement=UnitOfTime.SECONDS, + native_min_value=1, + native_max_value=10, + supported=lambda api, ch: api.supported(ch, "ai_linger"), + value=lambda api, ch, loc: api.baichuan.smart_ai_delay(ch, "loitering", loc), + method=lambda api, ch, loc, value: 
api.baichuan.set_smart_ai( + ch, "loitering", loc, delay=int(value) + ), + ), + ReolinkSmartAINumberEntityDescription( + key="forgotten_item_delay", + smart_type="legacy", + cmd_id=549, + translation_key="forgotten_item_delay", + entity_registry_enabled_default=False, + entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, + native_step=1, + native_unit_of_measurement=UnitOfTime.SECONDS, + native_min_value=1, + native_max_value=30, + supported=lambda api, ch: api.supported(ch, "ai_forgotten_item"), + value=lambda api, ch, loc: api.baichuan.smart_ai_delay(ch, "legacy", loc), + method=lambda api, ch, loc, value: api.baichuan.set_smart_ai( + ch, "legacy", loc, delay=int(value) + ), + ), + ReolinkSmartAINumberEntityDescription( + key="taken_item_delay", + smart_type="loss", + cmd_id=551, + translation_key="taken_item_delay", + entity_registry_enabled_default=False, + entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, + native_step=1, + native_unit_of_measurement=UnitOfTime.SECONDS, + native_min_value=1, + native_max_value=30, + supported=lambda api, ch: api.supported(ch, "ai_taken_item"), + value=lambda api, ch, loc: api.baichuan.smart_ai_delay(ch, "loss", loc), + method=lambda api, ch, loc, value: api.baichuan.set_smart_ai( + ch, "loss", loc, delay=int(value) + ), + ), +) + HOST_NUMBER_ENTITIES = ( ReolinkHostNumberEntityDescription( key="alarm_volume", @@ -542,22 +728,32 @@ async def async_setup_entry( ) -> None: """Set up a Reolink number entities.""" reolink_data: ReolinkData = config_entry.runtime_data + api = reolink_data.host.api entities: list[NumberEntity] = [ ReolinkNumberEntity(reolink_data, channel, entity_description) for entity_description in NUMBER_ENTITIES - for channel in reolink_data.host.api.channels - if entity_description.supported(reolink_data.host.api, channel) + for channel in api.channels + if entity_description.supported(api, channel) ] + entities.extend( + ReolinkSmartAINumberEntity(reolink_data, channel, location, entity_description) + for entity_description in SMART_AI_NUMBER_ENTITIES + for channel in api.channels + for location in api.baichuan.smart_location_list( + channel, entity_description.smart_type + ) + if entity_description.supported(api, channel) + ) entities.extend( ReolinkHostNumberEntity(reolink_data, entity_description) for entity_description in HOST_NUMBER_ENTITIES - if entity_description.supported(reolink_data.host.api) + if entity_description.supported(api) ) entities.extend( ReolinkChimeNumberEntity(reolink_data, chime, entity_description) for entity_description in CHIME_NUMBER_ENTITIES - for chime in reolink_data.host.api.chime_list + for chime in api.chime_list ) async_add_entities(entities) @@ -599,6 +795,51 @@ class ReolinkNumberEntity(ReolinkChannelCoordinatorEntity, NumberEntity): self.async_write_ha_state() +class ReolinkSmartAINumberEntity(ReolinkChannelCoordinatorEntity, NumberEntity): + """Base smart AI number entity class for Reolink IP cameras.""" + + entity_description: ReolinkSmartAINumberEntityDescription + + def __init__( + self, + reolink_data: ReolinkData, + channel: int, + location: int, + entity_description: ReolinkSmartAINumberEntityDescription, + ) -> None: + """Initialize Reolink number entity.""" + self.entity_description = entity_description + super().__init__(reolink_data, channel) + + unique_index = self._host.api.baichuan.smart_ai_index( + channel, entity_description.smart_type, location + ) + self._attr_unique_id = f"{self._attr_unique_id}_{unique_index}" + + 
self._location = location + self._attr_mode = entity_description.mode + self._attr_translation_placeholders = { + "zone_name": self._host.api.baichuan.smart_ai_name( + channel, entity_description.smart_type, location + ) + } + + @property + def native_value(self) -> float | None: + """State of the number entity.""" + return self.entity_description.value( + self._host.api, self._channel, self._location + ) + + @raise_translated_error + async def async_set_native_value(self, value: float) -> None: + """Update the current value.""" + await self.entity_description.method( + self._host.api, self._channel, self._location, value + ) + self.async_write_ha_state() + + class ReolinkHostNumberEntity(ReolinkHostCoordinatorEntity, NumberEntity): """Base number entity class for Reolink Host.""" diff --git a/homeassistant/components/reolink/select.py b/homeassistant/components/reolink/select.py index c0b20da0238..e5d66ed3901 100644 --- a/homeassistant/components/reolink/select.py +++ b/homeassistant/components/reolink/select.py @@ -30,6 +30,8 @@ from .entity import ( ReolinkChannelEntityDescription, ReolinkChimeCoordinatorEntity, ReolinkChimeEntityDescription, + ReolinkHostCoordinatorEntity, + ReolinkHostEntityDescription, ) from .util import ReolinkConfigEntry, ReolinkData, raise_translated_error @@ -49,6 +51,18 @@ class ReolinkSelectEntityDescription( value: Callable[[Host, int], str] | None = None +@dataclass(frozen=True, kw_only=True) +class ReolinkHostSelectEntityDescription( + SelectEntityDescription, + ReolinkHostEntityDescription, +): + """A class that describes host select entities.""" + + get_options: Callable[[Host], list[str]] + method: Callable[[Host, str], Any] + value: Callable[[Host], str] + + @dataclass(frozen=True, kw_only=True) class ReolinkChimeSelectEntityDescription( SelectEntityDescription, @@ -238,6 +252,19 @@ SELECT_ENTITIES = ( ), ) +HOST_SELECT_ENTITIES = ( + ReolinkHostSelectEntityDescription( + key="scene_mode", + cmd_key="GetScene", + translation_key="scene_mode", + entity_category=EntityCategory.CONFIG, + get_options=lambda api: api.baichuan.scene_names, + supported=lambda api: api.supported(None, "scenes"), + value=lambda api: api.baichuan.active_scene, + method=lambda api, name: api.baichuan.set_scene(scene_name=name), + ), +) + CHIME_SELECT_ENTITIES = ( ReolinkChimeSelectEntityDescription( key="motion_tone", @@ -300,12 +327,19 @@ async def async_setup_entry( """Set up a Reolink select entities.""" reolink_data: ReolinkData = config_entry.runtime_data - entities: list[ReolinkSelectEntity | ReolinkChimeSelectEntity] = [ + entities: list[ + ReolinkSelectEntity | ReolinkHostSelectEntity | ReolinkChimeSelectEntity + ] = [ ReolinkSelectEntity(reolink_data, channel, entity_description) for entity_description in SELECT_ENTITIES for channel in reolink_data.host.api.channels if entity_description.supported(reolink_data.host.api, channel) ] + entities.extend( + ReolinkHostSelectEntity(reolink_data, entity_description) + for entity_description in HOST_SELECT_ENTITIES + if entity_description.supported(reolink_data.host.api) + ) entities.extend( ReolinkChimeSelectEntity(reolink_data, chime, entity_description) for entity_description in CHIME_SELECT_ENTITIES @@ -360,6 +394,33 @@ class ReolinkSelectEntity(ReolinkChannelCoordinatorEntity, SelectEntity): self.async_write_ha_state() +class ReolinkHostSelectEntity(ReolinkHostCoordinatorEntity, SelectEntity): + """Base select entity class for Reolink Host.""" + + entity_description: ReolinkHostSelectEntityDescription + + def __init__( + 
self, + reolink_data: ReolinkData, + entity_description: ReolinkHostSelectEntityDescription, + ) -> None: + """Initialize Reolink select entity.""" + self.entity_description = entity_description + super().__init__(reolink_data) + self._attr_options = entity_description.get_options(self._host.api) + + @property + def current_option(self) -> str | None: + """Return the current option.""" + return self.entity_description.value(self._host.api) + + @raise_translated_error + async def async_select_option(self, option: str) -> None: + """Change the selected option.""" + await self.entity_description.method(self._host.api, option) + self.async_write_ha_state() + + class ReolinkChimeSelectEntity(ReolinkChimeCoordinatorEntity, SelectEntity): """Base select entity class for Reolink IP cameras.""" diff --git a/homeassistant/components/reolink/sensor.py b/homeassistant/components/reolink/sensor.py index ecad555b481..85de03dd1a3 100644 --- a/homeassistant/components/reolink/sensor.py +++ b/homeassistant/components/reolink/sensor.py @@ -107,6 +107,17 @@ SENSORS = ( value=lambda api, ch: BatteryEnum(api.battery_status(ch)).name, supported=lambda api, ch: api.supported(ch, "battery"), ), + ReolinkSensorEntityDescription( + key="day_night_state", + cmd_id=33, + cmd_key="296", + translation_key="day_night_state", + device_class=SensorDeviceClass.ENUM, + entity_category=EntityCategory.DIAGNOSTIC, + options=["day", "night", "led_day"], + value=lambda api, ch: api.baichuan.day_night_state(ch), + supported=lambda api, ch: api.supported(ch, "day_night_state"), + ), ) HOST_SENSORS = ( diff --git a/homeassistant/components/reolink/strings.json b/homeassistant/components/reolink/strings.json index 335ed92d32e..9a6db7b5d67 100644 --- a/homeassistant/components/reolink/strings.json +++ b/homeassistant/components/reolink/strings.json @@ -8,13 +8,15 @@ "host": "[%key:common::config_flow::data::host%]", "port": "[%key:common::config_flow::data::port%]", "use_https": "Enable HTTPS", + "baichuan_port": "Basic service port", "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" }, "data_description": { "host": "The hostname or IP address of your Reolink device. For example: '192.168.1.25'.", - "port": "The port to connect to the Reolink device. For HTTP normally: '80', for HTTPS normally '443'.", + "port": "The HTTP(s) port to connect to the Reolink device API. For HTTP normally: '80', for HTTPS normally '443'.", "use_https": "Use a HTTPS (SSL) connection to the Reolink device.", + "baichuan_port": "The 'Basic Service Port' to connect to the Reolink device over TCP. Normally '9000' unless manually changed in the Reolink desktop client.", "username": "Username to login to the Reolink device itself. Not the Reolink cloud account.", "password": "Password to login to the Reolink device itself. Not the Reolink cloud account." 
} @@ -29,7 +31,7 @@ "cannot_connect": "Failed to connect, check the IP address of the camera", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", "not_admin": "User needs to be admin, user \"{username}\" has authorisation level \"{userlevel}\"", - "password_incompatible": "Password contains incompatible special character, only these characters are allowed: a-z, A-Z, 0-9 or {special_chars}", + "password_incompatible": "Password contains an incompatible special character or is too long; a maximum of 31 characters and only these characters are allowed: a-z, A-Z, 0-9 or {special_chars}", "unknown": "[%key:common::config_flow::error::unknown%]", "update_needed": "Failed to login because of outdated firmware, please update the firmware to version {needed_firmware} using the Reolink Download Center: {download_center_url}, currently version {current_firmware} is installed", "webhook_exception": "Home Assistant URL is not available, go to Settings > System > Network > Home Assistant URL and correct the URLs, see {more_info}" @@ -100,7 +102,13 @@ "message": "Error trying to update Reolink firmware: {err}" }, "config_entry_not_ready": { - "message": "Error while trying to setup {host}: {err}" + "message": "Error while trying to set up {host}: {err}" + }, + "update_already_running": { + "message": "Reolink firmware update already running, wait for it to complete before starting another" + }, + "firmware_rate_limit": { + "message": "Reolink firmware update server reached its hourly rate limit: the update can be tried again in 1 hour" + } }, "issues": { @@ -127,6 +135,10 @@ "hub_switch_deprecated": { "title": "Reolink Home Hub switches deprecated", "description": "The redundant 'Record', 'Email on event', 'FTP upload', 'Push notifications', and 'Buzzer on event' switches on the Reolink Home Hub are deprecated since the new firmware no longer supports these. Please use the equally named switches under each of the camera devices connected to the Home Hub instead. To remove this issue, please adjust automations accordingly and disable the switch entities mentioned." + }, + "password_too_long": { + "title": "Reolink password too long", + "description": "The password for \"{name}\" is more than 31 characters long, which is no longer compatible with the Reolink API. Please change the password using the Reolink app/client to a password that is shorter than 32 characters. After changing the password, fill in the new password in the Reolink Re-authentication flow to continue using this integration. The latest version of the Reolink app/client also has a password limit of 31 characters."
} }, "services": { @@ -335,6 +347,83 @@ "off": "Awake", "on": "Sleeping" } + }, + "crossline_person": { + "name": "Crossline {zone_name} person", + "state": { + "off": "[%key:component::binary_sensor::entity_component::gas::state::off%]", + "on": "[%key:component::binary_sensor::entity_component::gas::state::on%]" + } + }, + "crossline_vehicle": { + "name": "Crossline {zone_name} vehicle", + "state": { + "off": "[%key:component::binary_sensor::entity_component::gas::state::off%]", + "on": "[%key:component::binary_sensor::entity_component::gas::state::on%]" + } + }, + "crossline_dog_cat": { + "name": "Crossline {zone_name} animal", + "state": { + "off": "[%key:component::binary_sensor::entity_component::gas::state::off%]", + "on": "[%key:component::binary_sensor::entity_component::gas::state::on%]" + } + }, + "intrusion_person": { + "name": "Intrusion {zone_name} person", + "state": { + "off": "[%key:component::binary_sensor::entity_component::gas::state::off%]", + "on": "[%key:component::binary_sensor::entity_component::gas::state::on%]" + } + }, + "intrusion_vehicle": { + "name": "Intrusion {zone_name} vehicle", + "state": { + "off": "[%key:component::binary_sensor::entity_component::gas::state::off%]", + "on": "[%key:component::binary_sensor::entity_component::gas::state::on%]" + } + }, + "intrusion_dog_cat": { + "name": "Intrusion {zone_name} animal", + "state": { + "off": "[%key:component::binary_sensor::entity_component::gas::state::off%]", + "on": "[%key:component::binary_sensor::entity_component::gas::state::on%]" + } + }, + "linger_person": { + "name": "Linger {zone_name} person", + "state": { + "off": "[%key:component::binary_sensor::entity_component::gas::state::off%]", + "on": "[%key:component::binary_sensor::entity_component::gas::state::on%]" + } + }, + "linger_vehicle": { + "name": "Linger {zone_name} vehicle", + "state": { + "off": "[%key:component::binary_sensor::entity_component::gas::state::off%]", + "on": "[%key:component::binary_sensor::entity_component::gas::state::on%]" + } + }, + "linger_dog_cat": { + "name": "Linger {zone_name} animal", + "state": { + "off": "[%key:component::binary_sensor::entity_component::gas::state::off%]", + "on": "[%key:component::binary_sensor::entity_component::gas::state::on%]" + } + }, + "forgotten_item": { + "name": "Item forgotten {zone_name}", + "state": { + "off": "[%key:component::binary_sensor::entity_component::gas::state::off%]", + "on": "[%key:component::binary_sensor::entity_component::gas::state::on%]" + } + }, + "taken_item": { + "name": "Item taken {zone_name}", + "state": { + "off": "[%key:component::binary_sensor::entity_component::gas::state::off%]", + "on": "[%key:component::binary_sensor::entity_component::gas::state::on%]" + } } }, "button": { @@ -479,6 +568,21 @@ "ai_animal_sensitivity": { "name": "AI animal sensitivity" }, + "crossline_sensitivity": { + "name": "AI crossline {zone_name} sensitivity" + }, + "intrusion_sensitivity": { + "name": "AI intrusion {zone_name} sensitivity" + }, + "linger_sensitivity": { + "name": "AI linger {zone_name} sensitivity" + }, + "forgotten_item_sensitivity": { + "name": "AI item forgotten {zone_name} sensitivity" + }, + "taken_item_sensitivity": { + "name": "AI item taken {zone_name} sensitivity" + }, "ai_face_delay": { "name": "AI face delay" }, @@ -497,6 +601,18 @@ "ai_animal_delay": { "name": "AI animal delay" }, + "intrusion_delay": { + "name": "AI intrusion {zone_name} delay" + }, + "linger_delay": { + "name": "AI linger {zone_name} delay" + }, + "forgotten_item_delay": { + 
"name": "AI item forgotten {zone_name} delay" + }, + "taken_item_delay": { + "name": "AI item taken {zone_name} delay" + }, "auto_quick_reply_time": { "name": "Auto quick reply time" }, @@ -720,6 +836,15 @@ }, "sub_bit_rate": { "name": "Fluent bit rate" + }, + "scene_mode": { + "name": "Scene mode", + "state": { + "off": "[%key:common::state::off%]", + "disarm": "Disarmed", + "home": "Home", + "away": "Away" + } } }, "sensor": { @@ -746,6 +871,14 @@ "chargecomplete": "Charge complete" } }, + "day_night_state": { + "name": "Day night state", + "state": { + "day": "Color", + "night": "Black & white", + "led_day": "Color with floodlight" + } + }, "hdd_storage": { "name": "HDD {hdd_index} storage" }, diff --git a/homeassistant/components/reolink/switch.py b/homeassistant/components/reolink/switch.py index 0f106c0f2cc..af87a75eece 100644 --- a/homeassistant/components/reolink/switch.py +++ b/homeassistant/components/reolink/switch.py @@ -162,6 +162,7 @@ SWITCH_ENTITIES = ( ReolinkSwitchEntityDescription( key="manual_record", cmd_key="GetManualRec", + cmd_id=588, translation_key="manual_record", entity_category=EntityCategory.CONFIG, supported=lambda api, ch: api.supported(ch, "manual_record"), diff --git a/homeassistant/components/reolink/update.py b/homeassistant/components/reolink/update.py index 0744d66fb5b..a7c883003b7 100644 --- a/homeassistant/components/reolink/update.py +++ b/homeassistant/components/reolink/update.py @@ -31,7 +31,7 @@ from .entity import ( ReolinkHostCoordinatorEntity, ReolinkHostEntityDescription, ) -from .util import ReolinkConfigEntry, ReolinkData +from .util import ReolinkConfigEntry, ReolinkData, raise_translated_error PARALLEL_UPDATES = 0 RESUME_AFTER_INSTALL = 15 @@ -184,6 +184,7 @@ class ReolinkUpdateBaseEntity( f"## Release notes\n\n{new_firmware.release_notes}" ) + @raise_translated_error async def async_install( self, version: str | None, backup: bool, **kwargs: Any ) -> None: @@ -196,6 +197,8 @@ class ReolinkUpdateBaseEntity( try: await self._host.api.update_firmware(self._channel) except ReolinkError as err: + if err.translation_key: + raise raise HomeAssistantError( translation_domain=DOMAIN, translation_key="firmware_install_error", diff --git a/homeassistant/components/reolink/util.py b/homeassistant/components/reolink/util.py index a5556b66a33..12b4825caeb 100644 --- a/homeassistant/components/reolink/util.py +++ b/homeassistant/components/reolink/util.py @@ -27,6 +27,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import device_registry as dr from homeassistant.helpers.storage import Store +from homeassistant.helpers.translation import async_get_exception_message from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from .const import DOMAIN @@ -97,6 +98,30 @@ def get_device_uid_and_ch( return (device_uid, ch, is_chime) +def check_translation_key(err: ReolinkError) -> str | None: + """Check if the translation key from the upstream library is present.""" + if not err.translation_key: + return None + if async_get_exception_message(DOMAIN, err.translation_key) == err.translation_key: + # translation key not found in strings.json + return None + return err.translation_key + + +_EXCEPTION_TO_TRANSLATION_KEY = { + ApiError: "api_error", + InvalidContentTypeError: "invalid_content_type", + CredentialsInvalidError: "invalid_credentials", + LoginError: "login_error", + NoDataError: "no_data", + UnexpectedDataError: 
"unexpected_data", + NotSupportedError: "not_supported", + SubscriptionError: "subscription_error", + ReolinkConnectionError: "connection_error", + ReolinkTimeoutError: "timeout", +} + + # Decorators def raise_translated_error[**P, R]( func: Callable[P, Awaitable[R]], @@ -110,73 +135,14 @@ def raise_translated_error[**P, R]( except InvalidParameterError as err: raise ServiceValidationError( translation_domain=DOMAIN, - translation_key="invalid_parameter", - translation_placeholders={"err": str(err)}, - ) from err - except ApiError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="api_error", - translation_placeholders={"err": str(err)}, - ) from err - except InvalidContentTypeError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="invalid_content_type", - translation_placeholders={"err": str(err)}, - ) from err - except CredentialsInvalidError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="invalid_credentials", - translation_placeholders={"err": str(err)}, - ) from err - except LoginError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="login_error", - translation_placeholders={"err": str(err)}, - ) from err - except NoDataError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="no_data", - translation_placeholders={"err": str(err)}, - ) from err - except UnexpectedDataError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="unexpected_data", - translation_placeholders={"err": str(err)}, - ) from err - except NotSupportedError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="not_supported", - translation_placeholders={"err": str(err)}, - ) from err - except SubscriptionError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="subscription_error", - translation_placeholders={"err": str(err)}, - ) from err - except ReolinkConnectionError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="connection_error", - translation_placeholders={"err": str(err)}, - ) from err - except ReolinkTimeoutError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="timeout", + translation_key=check_translation_key(err) or "invalid_parameter", translation_placeholders={"err": str(err)}, ) from err except ReolinkError as err: raise HomeAssistantError( translation_domain=DOMAIN, - translation_key="unexpected", + translation_key=check_translation_key(err) + or _EXCEPTION_TO_TRANSLATION_KEY.get(type(err), "unexpected"), translation_placeholders={"err": str(err)}, ) from err diff --git a/homeassistant/components/reolink/views.py b/homeassistant/components/reolink/views.py index 1a4585bc997..44265244b18 100644 --- a/homeassistant/components/reolink/views.py +++ b/homeassistant/components/reolink/views.py @@ -83,7 +83,16 @@ class PlaybackProxyView(HomeAssistantView): _LOGGER.warning("Reolink playback proxy error: %s", str(err)) return web.Response(body=str(err), status=HTTPStatus.BAD_REQUEST) + headers = dict(request.headers) + headers.pop("Host", None) + headers.pop("Referer", None) + if _LOGGER.isEnabledFor(logging.DEBUG): + _LOGGER.debug( + "Requested Playback Proxy Method %s, Headers: %s", + request.method, + headers, + ) _LOGGER.debug( "Opening VOD stream from %s: %s", host.api.camera_name(ch), @@ -93,6 +102,7 @@ class PlaybackProxyView(HomeAssistantView): try: reolink_response = 
await self.session.get( reolink_url, + headers=headers, timeout=ClientTimeout( connect=15, sock_connect=15, sock_read=5, total=None ), @@ -118,18 +128,25 @@ class PlaybackProxyView(HomeAssistantView): ]: err_str = f"Reolink playback expected video/mp4 but got {reolink_response.content_type}" _LOGGER.error(err_str) + if reolink_response.content_type == "text/html": + text = await reolink_response.text() + _LOGGER.debug(text) return web.Response(body=err_str, status=HTTPStatus.BAD_REQUEST) - response = web.StreamResponse( - status=200, - reason="OK", - headers={ - "Content-Type": "video/mp4", - }, + response_headers = dict(reolink_response.headers) + _LOGGER.debug( + "Response Playback Proxy Status %s:%s, Headers: %s", + reolink_response.status, + reolink_response.reason, + response_headers, ) + response_headers["Content-Type"] = "video/mp4" - if reolink_response.content_length is not None: - response.content_length = reolink_response.content_length + response = web.StreamResponse( + status=reolink_response.status, + reason=reolink_response.reason, + headers=response_headers, + ) await response.prepare(request) @@ -141,7 +158,8 @@ class PlaybackProxyView(HomeAssistantView): "Timeout while reading Reolink playback from %s, writing EOF", host.api.nvr_name, ) + finally: + reolink_response.release() - reolink_response.release() await response.write_eof() return response diff --git a/homeassistant/components/rflink/sensor.py b/homeassistant/components/rflink/sensor.py index 027c39da70f..97d0b811509 100644 --- a/homeassistant/components/rflink/sensor.py +++ b/homeassistant/components/rflink/sensor.py @@ -236,7 +236,8 @@ SENSOR_TYPES = ( key="winddirection", name="Wind direction", icon="mdi:compass", - state_class=SensorStateClass.MEASUREMENT, + state_class=SensorStateClass.MEASUREMENT_ANGLE, + device_class=SensorDeviceClass.WIND_DIRECTION, native_unit_of_measurement=DEGREE, ), SensorEntityDescription( diff --git a/homeassistant/components/rfxtrx/sensor.py b/homeassistant/components/rfxtrx/sensor.py index 4b256279445..6669b1367df 100644 --- a/homeassistant/components/rfxtrx/sensor.py +++ b/homeassistant/components/rfxtrx/sensor.py @@ -161,7 +161,8 @@ SENSOR_TYPES = ( RfxtrxSensorEntityDescription( key="Wind direction", translation_key="wind_direction", - state_class=SensorStateClass.MEASUREMENT, + state_class=SensorStateClass.MEASUREMENT_ANGLE, + device_class=SensorDeviceClass.WIND_DIRECTION, native_unit_of_measurement=DEGREE, ), RfxtrxSensorEntityDescription( diff --git a/homeassistant/components/risco/const.py b/homeassistant/components/risco/const.py index 078e26c43b5..ef3280fe232 100644 --- a/homeassistant/components/risco/const.py +++ b/homeassistant/components/risco/const.py @@ -30,9 +30,9 @@ RISCO_ARM = "arm" RISCO_PARTIAL_ARM = "partial_arm" RISCO_STATES = [RISCO_ARM, RISCO_PARTIAL_ARM, *RISCO_GROUPS] -DEFAULT_RISCO_GROUPS_TO_HA = { - group: AlarmControlPanelState.ARMED_HOME for group in RISCO_GROUPS -} +DEFAULT_RISCO_GROUPS_TO_HA = dict.fromkeys( + RISCO_GROUPS, AlarmControlPanelState.ARMED_HOME +) DEFAULT_RISCO_STATES_TO_HA = { RISCO_ARM: AlarmControlPanelState.ARMED_AWAY, RISCO_PARTIAL_ARM: AlarmControlPanelState.ARMED_HOME, diff --git a/homeassistant/components/roborock/__init__.py b/homeassistant/components/roborock/__init__.py index 955e50cd15b..8140b58b86c 100644 --- a/homeassistant/components/roborock/__init__.py +++ b/homeassistant/components/roborock/__init__.py @@ -23,6 +23,8 @@ from roborock.web_api import RoborockApiClient from homeassistant.const import CONF_USERNAME, 
EVENT_HOMEASSISTANT_STOP from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import CONF_BASE_URL, CONF_USER_DATA, DOMAIN, PLATFORMS from .coordinator import ( @@ -44,7 +46,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: RoborockConfigEntry) -> entry.async_on_unload(entry.add_update_listener(update_listener)) user_data = UserData.from_dict(entry.data[CONF_USER_DATA]) - api_client = RoborockApiClient(entry.data[CONF_USERNAME], entry.data[CONF_BASE_URL]) + api_client = RoborockApiClient( + entry.data[CONF_USERNAME], + entry.data[CONF_BASE_URL], + session=async_get_clientsession(hass), + ) _LOGGER.debug("Getting home data") try: home_data = await api_client.get_home_data_v2(user_data) @@ -111,6 +117,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: RoborockConfigEntry) -> translation_key="no_coordinators", ) valid_coordinators = RoborockCoordinators(v1_coords, a01_coords) + await asyncio.gather( + *(coord.refresh_coordinator_map() for coord in valid_coordinators.v1) + ) async def on_stop(_: Any) -> None: _LOGGER.debug("Shutting down roborock") @@ -131,6 +140,27 @@ async def async_setup_entry(hass: HomeAssistant, entry: RoborockConfigEntry) -> await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + device_registry = dr.async_get(hass) + device_entries = dr.async_entries_for_config_entry( + device_registry, config_entry_id=entry.entry_id + ) + for device in device_entries: + # Remove any devices that are no longer in the account. + # The API returns all devices, even if they are offline. + device_duids = { + identifier[1].replace("_dock", "") for identifier in device.identifiers + } + if any(device_duid in device_map for device_duid in device_duids): + continue + _LOGGER.info( + "Removing device: %s because it no longer exists in your account", + device.name, + ) + device_registry.async_update_device( + device_id=device.id, + remove_config_entry_id=entry.entry_id, + ) + return True diff --git a/homeassistant/components/roborock/binary_sensor.py b/homeassistant/components/roborock/binary_sensor.py index f2b1564c7b5..a2c34f5c59d 100644 --- a/homeassistant/components/roborock/binary_sensor.py +++ b/homeassistant/components/roborock/binary_sensor.py @@ -20,12 +20,16 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from .coordinator import RoborockConfigEntry, RoborockDataUpdateCoordinator from .entity import RoborockCoordinatedEntityV1 +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class RoborockBinarySensorDescription(BinarySensorEntityDescription): """A class that describes Roborock binary sensors.""" value_fn: Callable[[DeviceProp], bool | int | None] + # If it is a dock entity + is_dock_entity: bool = False BINARY_SENSOR_DESCRIPTIONS = [ @@ -35,6 +39,7 @@ BINARY_SENSOR_DESCRIPTIONS = [ device_class=BinarySensorDeviceClass.RUNNING, entity_category=EntityCategory.DIAGNOSTIC, value_fn=lambda data: data.status.dry_status, + is_dock_entity=True, ), RoborockBinarySensorDescription( key="water_box_carriage_status", @@ -105,6 +110,7 @@ class RoborockBinarySensorEntity(RoborockCoordinatedEntityV1, BinarySensorEntity super().__init__( f"{description.key}_{coordinator.duid_slug}", coordinator, + is_dock_entity=description.is_dock_entity, ) self.entity_description = description diff --git
a/homeassistant/components/roborock/button.py b/homeassistant/components/roborock/button.py index f0f0d7beea2..fea38524fe0 100644 --- a/homeassistant/components/roborock/button.py +++ b/homeassistant/components/roborock/button.py @@ -17,6 +17,8 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from .coordinator import RoborockConfigEntry, RoborockDataUpdateCoordinator from .entity import RoborockEntity, RoborockEntityV1 +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class RoborockButtonDescription(ButtonEntityDescription): diff --git a/homeassistant/components/roborock/config_flow.py b/homeassistant/components/roborock/config_flow.py index 1a6b67286bb..1a359faca10 100644 --- a/homeassistant/components/roborock/config_flow.py +++ b/homeassistant/components/roborock/config_flow.py @@ -21,14 +21,17 @@ import voluptuous as vol from homeassistant.config_entries import ( SOURCE_REAUTH, - ConfigEntry, ConfigFlow, ConfigFlowResult, OptionsFlow, ) from homeassistant.const import CONF_USERNAME from homeassistant.core import callback +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo +from . import RoborockConfigEntry from .const import ( CONF_BASE_URL, CONF_ENTRY_CODE, @@ -63,7 +66,9 @@ class RoborockFlowHandler(ConfigFlow, domain=DOMAIN): self._abort_if_unique_id_configured(error="already_configured_account") self._username = username _LOGGER.debug("Requesting code for Roborock account") - self._client = RoborockApiClient(username) + self._client = RoborockApiClient( + username, session=async_get_clientsession(self.hass) + ) errors = await self._request_code() if not errors: return await self.async_step_code() @@ -134,13 +139,31 @@ class RoborockFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors, ) + async def async_step_dhcp( + self, discovery_info: DhcpServiceInfo + ) -> ConfigFlowResult: + """Handle a flow started by a dhcp discovery.""" + device_registry = dr.async_get(self.hass) + device = device_registry.async_get_device( + connections={ + (dr.CONNECTION_NETWORK_MAC, dr.format_mac(discovery_info.macaddress)) + } + ) + if device is not None and any( + identifier[0] == DOMAIN for identifier in device.identifiers + ): + return self.async_abort(reason="already_configured") + return await self.async_step_user() + async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" self._username = entry_data[CONF_USERNAME] assert self._username - self._client = RoborockApiClient(self._username) + self._client = RoborockApiClient( + self._username, session=async_get_clientsession(self.hass) + ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -170,7 +193,7 @@ class RoborockFlowHandler(ConfigFlow, domain=DOMAIN): @staticmethod @callback def async_get_options_flow( - config_entry: ConfigEntry, + config_entry: RoborockConfigEntry, ) -> RoborockOptionsFlowHandler: """Create the options flow.""" return RoborockOptionsFlowHandler(config_entry) @@ -179,7 +202,7 @@ class RoborockFlowHandler(ConfigFlow, domain=DOMAIN): class RoborockOptionsFlowHandler(OptionsFlow): """Handle an option flow for Roborock.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self, config_entry: RoborockConfigEntry) -> None: """Initialize options flow.""" self.options = 
deepcopy(dict(config_entry.options)) diff --git a/homeassistant/components/roborock/const.py b/homeassistant/components/roborock/const.py index cc8d34fbadc..e56fade7078 100644 --- a/homeassistant/components/roborock/const.py +++ b/homeassistant/components/roborock/const.py @@ -1,5 +1,7 @@ """Constants for Roborock.""" +from datetime import timedelta + from vacuum_map_parser_base.config.drawable import Drawable from homeassistant.const import Platform @@ -43,13 +45,21 @@ PLATFORMS = [ Platform.VACUUM, ] - -IMAGE_CACHE_INTERVAL = 90 +# This can be lowered in the future if we do not receive rate limiting issues. +IMAGE_CACHE_INTERVAL = timedelta(seconds=30) MAP_SLEEP = 3 GET_MAPS_SERVICE_NAME = "get_maps" +MAP_SCALE = 4 MAP_FILE_FORMAT = "PNG" MAP_FILENAME_SUFFIX = ".png" SET_VACUUM_GOTO_POSITION_SERVICE_NAME = "set_vacuum_goto_position" GET_VACUUM_CURRENT_POSITION_SERVICE_NAME = "get_vacuum_current_position" + + +A01_UPDATE_INTERVAL = timedelta(minutes=1) +V1_CLOUD_IN_CLEANING_INTERVAL = timedelta(seconds=30) +V1_CLOUD_NOT_CLEANING_INTERVAL = timedelta(minutes=1) +V1_LOCAL_IN_CLEANING_INTERVAL = timedelta(seconds=15) +V1_LOCAL_NOT_CLEANING_INTERVAL = timedelta(seconds=30) diff --git a/homeassistant/components/roborock/coordinator.py b/homeassistant/components/roborock/coordinator.py index 1ab23fc927a..cc0bee1cd5f 100644 --- a/homeassistant/components/roborock/coordinator.py +++ b/homeassistant/components/roborock/coordinator.py @@ -5,6 +5,7 @@ from __future__ import annotations import asyncio from dataclasses import dataclass from datetime import timedelta +import io import logging from propcache.api import cached_property @@ -25,6 +26,11 @@ from roborock.version_1_apis.roborock_local_client_v1 import RoborockLocalClient from roborock.version_1_apis.roborock_mqtt_client_v1 import RoborockMqttClientV1 from roborock.version_a01_apis import RoborockClientA01 from roborock.web_api import RoborockApiClient +from vacuum_map_parser_base.config.color import ColorsPalette +from vacuum_map_parser_base.config.image_config import ImageConfig +from vacuum_map_parser_base.config.size import Sizes +from vacuum_map_parser_base.map_data import MapData +from vacuum_map_parser_roborock.map_data_parser import RoborockMapDataParser from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_CONNECTIONS @@ -34,9 +40,22 @@ from homeassistant.helpers import device_registry as dr from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from homeassistant.util import slugify +from homeassistant.util import dt as dt_util, slugify -from .const import DOMAIN +from .const import ( + A01_UPDATE_INTERVAL, + DEFAULT_DRAWABLES, + DOMAIN, + DRAWABLES, + IMAGE_CACHE_INTERVAL, + MAP_FILE_FORMAT, + MAP_SCALE, + MAP_SLEEP, + V1_CLOUD_IN_CLEANING_INTERVAL, + V1_CLOUD_NOT_CLEANING_INTERVAL, + V1_LOCAL_IN_CLEANING_INTERVAL, + V1_LOCAL_NOT_CLEANING_INTERVAL, +) from .models import RoborockA01HassDeviceInfo, RoborockHassDeviceInfo, RoborockMapInfo from .roborock_storage import RoborockMapStorage @@ -85,7 +104,8 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]): _LOGGER, config_entry=config_entry, name=DOMAIN, - update_interval=SCAN_INTERVAL, + # Assume we can use the local api. 
+ update_interval=V1_LOCAL_NOT_CLEANING_INTERVAL, ) self.roborock_device_info = RoborockHassDeviceInfo( device, @@ -109,7 +129,9 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]): self.current_map: int | None = None if mac := self.roborock_device_info.network_info.mac: - self.device_info[ATTR_CONNECTIONS] = {(dr.CONNECTION_NETWORK_MAC, mac)} + self.device_info[ATTR_CONNECTIONS] = { + (dr.CONNECTION_NETWORK_MAC, dr.format_mac(mac)) + } # Maps from map flag to map name self.maps: dict[int, RoborockMapInfo] = {} self._home_data_rooms = {str(room.id): room.name for room in home_data_rooms} @@ -118,6 +140,51 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]): ) self._user_data = user_data self._api_client = api_client + self._is_cloud_api = False + drawables = [ + drawable + for drawable, default_value in DEFAULT_DRAWABLES.items() + if config_entry.options.get(DRAWABLES, {}).get(drawable, default_value) + ] + self.map_parser = RoborockMapDataParser( + ColorsPalette(), + Sizes({k: v * MAP_SCALE for k, v in Sizes.SIZES.items()}), + drawables, + ImageConfig(scale=MAP_SCALE), + [], + ) + + @cached_property + def dock_device_info(self) -> DeviceInfo: + """Gets the device info for the dock. + + This must happen after the coordinator does the first update. + Which will be the case when this is called. + """ + dock_type = self.roborock_device_info.props.status.dock_type + return DeviceInfo( + name=f"{self.roborock_device_info.device.name} Dock", + identifiers={(DOMAIN, f"{self.duid}_dock")}, + manufacturer="Roborock", + model=f"{self.roborock_device_info.product.model} Dock", + model_id=str(dock_type.value) if dock_type is not None else "Unknown", + sw_version=self.roborock_device_info.device.fv, + ) + + def parse_map_data_v1( + self, map_bytes: bytes + ) -> tuple[bytes | None, MapData | None]: + """Parse map_bytes and return MapData and the image.""" + try: + parsed_map = self.map_parser.parse(map_bytes) + except (IndexError, ValueError) as err: + _LOGGER.debug("Exception when parsing map contents: %s", err) + return None, None + if parsed_map.image is None: + return None, None + img_byte_arr = io.BytesIO() + parsed_map.image.data.save(img_byte_arr, format=MAP_FILE_FORMAT) + return img_byte_arr.getvalue(), parsed_map async def _async_setup(self) -> None: """Set up the coordinator.""" @@ -128,17 +195,68 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]): try: maps = await self.api.get_multi_maps_list() except RoborockException as err: - raise UpdateFailed("Failed to get map data: {err}") from err + _LOGGER.debug("Failed to get maps: %s", err) + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="map_failure", + translation_placeholders={"error": str(err)}, + ) from err # Rooms names populated later with calls to `set_current_map_rooms` for each map + roborock_maps = maps.map_info if (maps and maps.map_info) else () + stored_images = await asyncio.gather( + *[ + self.map_storage.async_load_map(roborock_map.mapFlag) + for roborock_map in roborock_maps + ] + ) self.maps = { roborock_map.mapFlag: RoborockMapInfo( flag=roborock_map.mapFlag, name=roborock_map.name or f"Map {roborock_map.mapFlag}", rooms={}, + image=image, + last_updated=dt_util.utcnow() - IMAGE_CACHE_INTERVAL, + map_data=None, ) - for roborock_map in (maps.map_info if (maps and maps.map_info) else ()) + for image, roborock_map in zip(stored_images, roborock_maps, strict=False) } + async def update_map(self) -> None: + """Update the currently selected 
map.""" + # The current map was set in the props update, so these can be done without + # worry of applying them to the wrong map. + if self.current_map is None: + # This exists as a safeguard/ to keep mypy happy. + return + try: + response = await self.cloud_api.get_map_v1() + except RoborockException as ex: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="map_failure", + ) from ex + if not isinstance(response, bytes): + _LOGGER.debug("Failed to parse map contents: %s", response) + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="map_failure", + ) + parsed_image, parsed_map = self.parse_map_data_v1(response) + if parsed_image is None or parsed_map is None: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="map_failure", + ) + current_roborock_map_info = self.maps[self.current_map] + if parsed_image != self.maps[self.current_map].image: + await self.map_storage.async_save_map( + self.current_map, + parsed_image, + ) + current_roborock_map_info.image = parsed_image + current_roborock_map_info.last_updated = dt_util.utcnow() + current_roborock_map_info.map_data = parsed_map + async def _verify_api(self) -> None: """Verify that the api is reachable. If it is not, switch clients.""" if isinstance(self.api, RoborockLocalClientV1): @@ -152,6 +270,8 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]): await self.api.async_disconnect() # We use the cloud api if the local api fails to connect. self.api = self.cloud_api + self.update_interval = V1_CLOUD_NOT_CLEANING_INTERVAL + self._is_cloud_api = True # Right now this should never be called if the cloud api is the primary api, # but in the future if it is, a new else should be added. @@ -171,16 +291,45 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]): async def _async_update_data(self) -> DeviceProp: """Update data via library.""" + previous_state = self.roborock_device_info.props.status.state_name try: # Update device props and standard api information await self._update_device_prop() # Set the new map id from the updated device props self._set_current_map() # Get the rooms for that map id. + + # If the vacuum is currently cleaning and it has been IMAGE_CACHE_INTERVAL + # since the last map update, you can update the map. 
+ new_status = self.roborock_device_info.props.status + if self.current_map is not None and ( + ( + new_status.in_cleaning + and (dt_util.utcnow() - self.maps[self.current_map].last_updated) + > IMAGE_CACHE_INTERVAL + ) + or previous_state != new_status.state_name + ): + try: + await self.update_map() + except HomeAssistantError as err: + _LOGGER.debug("Failed to update map: %s", err) await self.set_current_map_rooms() except RoborockException as ex: _LOGGER.debug("Failed to update data: %s", ex) - raise UpdateFailed(ex) from ex + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_data_fail", + ) from ex + if self.roborock_device_info.props.status.in_cleaning: + if self._is_cloud_api: + self.update_interval = V1_CLOUD_IN_CLEANING_INTERVAL + else: + self.update_interval = V1_LOCAL_IN_CLEANING_INTERVAL + elif self._is_cloud_api: + self.update_interval = V1_CLOUD_NOT_CLEANING_INTERVAL + else: + self.update_interval = V1_LOCAL_NOT_CLEANING_INTERVAL return self.roborock_device_info.props def _set_current_map(self) -> None: @@ -245,6 +394,43 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]): """Get the slug of the duid.""" return slugify(self.duid) + async def refresh_coordinator_map(self) -> None: + """Get the starting map information for all maps for this device. + + The following steps must be done synchronously. + Only one map can be loaded at a time per device. + """ + cur_map = self.current_map + # This won't be None at this point as the coordinator will have run first. + if cur_map is None: + # If we don't have a cur map(shouldn't happen) just + # return as we can't do anything. + return + map_flags = sorted(self.maps, key=lambda data: data == cur_map, reverse=True) + for map_flag in map_flags: + if map_flag != cur_map: + # Only change the map and sleep if we have multiple maps. + await self.api.load_multi_map(map_flag) + self.current_map = map_flag + # We cannot get the map until the roborock servers fully process the + # map change. + await asyncio.sleep(MAP_SLEEP) + tasks = [self.set_current_map_rooms()] + # The image is set within async_setup, so if it exists, we have it here. + if self.maps[map_flag].image is None: + # If we don't have a cached map, let's update it here so that it can be + # cached in the future. + tasks.append(self.update_map()) + # If either of these fail, we don't care, and we want to continue. + await asyncio.gather(*tasks, return_exceptions=True) + + if len(self.maps) != 1: + # Set the map back to the map the user previously had selected so that it + # does not change the end user's app. + # Only needs to happen when we changed maps above. 
+ await self.api.load_multi_map(cur_map) + self.current_map = cur_map + class RoborockDataUpdateCoordinatorA01( DataUpdateCoordinator[ @@ -269,7 +455,7 @@ class RoborockDataUpdateCoordinatorA01( _LOGGER, config_entry=config_entry, name=DOMAIN, - update_interval=SCAN_INTERVAL, + update_interval=A01_UPDATE_INTERVAL, ) self.api = api self.device_info = DeviceInfo( diff --git a/homeassistant/components/roborock/entity.py b/homeassistant/components/roborock/entity.py index d417ac17159..404f239c93a 100644 --- a/homeassistant/components/roborock/entity.py +++ b/homeassistant/components/roborock/entity.py @@ -121,12 +121,15 @@ class RoborockCoordinatedEntityV1( listener_request: list[RoborockDataProtocol] | RoborockDataProtocol | None = None, + is_dock_entity: bool = False, ) -> None: """Initialize the coordinated Roborock Device.""" RoborockEntityV1.__init__( self, unique_id=unique_id, - device_info=coordinator.device_info, + device_info=coordinator.device_info + if not is_dock_entity + else coordinator.dock_device_info, api=coordinator.api, ) CoordinatorEntity.__init__(self, coordinator=coordinator) diff --git a/homeassistant/components/roborock/image.py b/homeassistant/components/roborock/image.py index 66088d6453c..d1c19331ba4 100644 --- a/homeassistant/components/roborock/image.py +++ b/homeassistant/components/roborock/image.py @@ -1,38 +1,21 @@ """Support for Roborock image.""" -import asyncio -from collections.abc import Callable from datetime import datetime -import io import logging -from roborock import RoborockCommand -from vacuum_map_parser_base.config.color import ColorsPalette -from vacuum_map_parser_base.config.image_config import ImageConfig -from vacuum_map_parser_base.config.size import Sizes -from vacuum_map_parser_roborock.map_data_parser import RoborockMapDataParser - from homeassistant.components.image import ImageEntity from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback -from homeassistant.util import dt as dt_util -from .const import ( - DEFAULT_DRAWABLES, - DOMAIN, - DRAWABLES, - IMAGE_CACHE_INTERVAL, - MAP_FILE_FORMAT, - MAP_SLEEP, -) from .coordinator import RoborockConfigEntry, RoborockDataUpdateCoordinator from .entity import RoborockCoordinatedEntityV1 _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, @@ -41,30 +24,6 @@ async def async_setup_entry( ) -> None: """Set up Roborock image platform.""" - drawables = [ - drawable - for drawable, default_value in DEFAULT_DRAWABLES.items() - if config_entry.options.get(DRAWABLES, {}).get(drawable, default_value) - ] - parser = RoborockMapDataParser( - ColorsPalette(), Sizes(), drawables, ImageConfig(), [] - ) - - def parse_image(map_bytes: bytes) -> bytes | None: - try: - parsed_map = parser.parse(map_bytes) - except (IndexError, ValueError) as err: - _LOGGER.debug("Exception when parsing map contents: %s", err) - return None - if parsed_map.image is None: - return None - img_byte_arr = io.BytesIO() - parsed_map.image.data.save(img_byte_arr, format=MAP_FILE_FORMAT) - return img_byte_arr.getvalue() - - await asyncio.gather( - *(refresh_coordinators(hass, coord) for coord in config_entry.runtime_data.v1) - ) async_add_entities( ( RoborockMap( @@ -73,7 +32,6 @@ async def async_setup_entry( coord, map_info.flag, map_info.name, - parse_image, 
) for coord in config_entry.runtime_data.v1 for map_info in coord.maps.values() @@ -95,14 +53,12 @@ class RoborockMap(RoborockCoordinatedEntityV1, ImageEntity): coordinator: RoborockDataUpdateCoordinator, map_flag: int, map_name: str, - parser: Callable[[bytes], bytes | None], ) -> None: """Initialize a Roborock map.""" RoborockCoordinatedEntityV1.__init__(self, unique_id, coordinator) ImageEntity.__init__(self, coordinator.hass) self.config_entry = config_entry self._attr_name = map_name - self.parser = parser self.map_flag = map_flag self.cached_map = b"" self._attr_entity_category = EntityCategory.DIAGNOSTIC @@ -112,90 +68,22 @@ class RoborockMap(RoborockCoordinatedEntityV1, ImageEntity): """Return if this map is the currently selected map.""" return self.map_flag == self.coordinator.current_map - def is_map_valid(self) -> bool: - """Update the map if it is valid. - - Update this map if it is the currently active map, and the - vacuum is cleaning, or if it has never been set at all. - """ - return self.cached_map == b"" or ( - self.is_selected - and self.image_last_updated is not None - and self.coordinator.roborock_device_info.props.status is not None - and bool(self.coordinator.roborock_device_info.props.status.in_cleaning) - ) - async def async_added_to_hass(self) -> None: """When entity is added to hass load any previously cached maps from disk.""" await super().async_added_to_hass() - content = await self.coordinator.map_storage.async_load_map(self.map_flag) - self.cached_map = content or b"" - self._attr_image_last_updated = dt_util.utcnow() + self._attr_image_last_updated = self.coordinator.maps[ + self.map_flag + ].last_updated self.async_write_ha_state() def _handle_coordinator_update(self) -> None: - # Bump last updated every third time the coordinator runs, so that async_image - # will be called and we will evaluate on the new coordinator data if we should - # update the cache. - if ( - dt_util.utcnow() - self.image_last_updated - ).total_seconds() > IMAGE_CACHE_INTERVAL and self.is_map_valid(): - self._attr_image_last_updated = dt_util.utcnow() + # If the coordinator has updated the map, we can update the image. + self._attr_image_last_updated = self.coordinator.maps[ + self.map_flag + ].last_updated + super()._handle_coordinator_update() async def async_image(self) -> bytes | None: - """Update the image if it is not cached.""" - if self.is_map_valid(): - response = await asyncio.gather( - *( - self.cloud_api.get_map_v1(), - self.coordinator.set_current_map_rooms(), - ), - return_exceptions=True, - ) - if ( - not isinstance(response[0], bytes) - or (content := self.parser(response[0])) is None - ): - _LOGGER.debug("Failed to parse map contents: %s", response[0]) - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="map_failure", - ) - if self.cached_map != content: - self.cached_map = content - await self.coordinator.map_storage.async_save_map( - self.map_flag, - content, - ) - return self.cached_map - - -async def refresh_coordinators( - hass: HomeAssistant, coord: RoborockDataUpdateCoordinator -) -> None: - """Get the starting map information for all maps for this device. - - The following steps must be done synchronously. - Only one map can be loaded at a time per device. - """ - cur_map = coord.current_map - # This won't be None at this point as the coordinator will have run first. 
- assert cur_map is not None - map_flags = sorted(coord.maps, key=lambda data: data == cur_map, reverse=True) - for map_flag in map_flags: - if map_flag != cur_map: - # Only change the map and sleep if we have multiple maps. - await coord.api.send_command(RoborockCommand.LOAD_MULTI_MAP, [map_flag]) - coord.current_map = map_flag - # We cannot get the map until the roborock servers fully process the - # map change. - await asyncio.sleep(MAP_SLEEP) - await coord.set_current_map_rooms() - - if len(coord.maps) != 1: - # Set the map back to the map the user previously had selected so that it - # does not change the end user's app. - # Only needs to happen when we changed maps above. - await coord.cloud_api.send_command(RoborockCommand.LOAD_MULTI_MAP, [cur_map]) - coord.current_map = cur_map + """Get the cached image.""" + return self.coordinator.maps[self.map_flag].image diff --git a/homeassistant/components/roborock/manifest.json b/homeassistant/components/roborock/manifest.json index db2654d4baa..531590d5d6e 100644 --- a/homeassistant/components/roborock/manifest.json +++ b/homeassistant/components/roborock/manifest.json @@ -3,11 +3,23 @@ "name": "Roborock", "codeowners": ["@Lash-L", "@allenporter"], "config_flow": true, + "dhcp": [ + { + "macaddress": "249E7D*" + }, + { + "macaddress": "B04A39*" + }, + { + "hostname": "roborock-*" + } + ], "documentation": "https://www.home-assistant.io/integrations/roborock", "iot_class": "local_polling", "loggers": ["roborock"], + "quality_scale": "silver", "requirements": [ - "python-roborock==2.11.1", + "python-roborock==2.16.1", "vacuum-map-parser-roborock==0.1.2" ] } diff --git a/homeassistant/components/roborock/models.py b/homeassistant/components/roborock/models.py index 4b8ab43b4a1..ab40f23d574 100644 --- a/homeassistant/components/roborock/models.py +++ b/homeassistant/components/roborock/models.py @@ -1,10 +1,12 @@ """Roborock Models.""" from dataclasses import dataclass +from datetime import datetime from typing import Any from roborock.containers import HomeDataDevice, HomeDataProduct, NetworkInfo from roborock.roborock_typing import DeviceProp +from vacuum_map_parser_base.map_data import MapData @dataclass @@ -48,3 +50,13 @@ class RoborockMapInfo: flag: int name: str rooms: dict[int, str] + image: bytes | None + last_updated: datetime + map_data: MapData | None + + @property + def current_room(self) -> str | None: + """Get the currently active room for this map if any.""" + if self.map_data is None or self.map_data.vacuum_room is None: + return None + return self.rooms.get(self.map_data.vacuum_room) diff --git a/homeassistant/components/roborock/number.py b/homeassistant/components/roborock/number.py index a710eeefb90..73ac14fca71 100644 --- a/homeassistant/components/roborock/number.py +++ b/homeassistant/components/roborock/number.py @@ -22,6 +22,8 @@ from .entity import RoborockEntityV1 _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class RoborockNumberDescription(NumberEntityDescription): diff --git a/homeassistant/components/roborock/quality_scale.yaml b/homeassistant/components/roborock/quality_scale.yaml index 845d77d0fbe..32ddb145f90 100644 --- a/homeassistant/components/roborock/quality_scale.yaml +++ b/homeassistant/components/roborock/quality_scale.yaml @@ -1,28 +1,16 @@ rules: # Bronze action-setup: done - appropriate-polling: - status: todo - comment: | - The device currently polls every 30 seconds, which is a bit high when idle. 
- We should consider dynamic polling intervals (e.g. when cleaning) and - separate cloud vs local intervals. + appropriate-polling: done brands: done common-modules: done - config-flow: - status: todo - comment: Not all fields have a data_description. + config-flow: done config-flow-test-coverage: done dependency-transparency: done - docs-actions: - status: todo - comment: | - The documentation for `roborock.get_maps` should be updated so it is next - to the other actions rather than only an example. All actions should be - updated to use the simple table format. + docs-actions: done docs-high-level-description: done - docs-installation-instructions: todo - docs-removal-instructions: todo + docs-installation-instructions: done + docs-removal-instructions: done entity-event-setup: done entity-unique-id: done has-entity-name: done @@ -33,39 +21,30 @@ rules: test-before-setup: done unique-config-entry: done # Silver - action-exceptions: todo + action-exceptions: done config-entry-unloading: done - docs-configuration-parameters: todo - docs-installation-parameters: todo + docs-configuration-parameters: done + docs-installation-parameters: done entity-unavailable: done integration-owner: done log-when-unavailable: done - parallel-updates: todo - reauthentication-flow: todo + parallel-updates: done + reauthentication-flow: done test-coverage: done # Gold devices: done diagnostics: done - discovery: - status: todo - comment: Determine if these devices can support discovery + discovery: done discovery-update-info: status: exempt comment: Devices do not support discovery. - docs-data-update: - status: todo - comment: | - The docs talk about device communication works (cloud vs local), but does - not yet describe data flow (e.g. polling). We should move into a separate - section. - docs-examples: todo + docs-data-update: done + docs-examples: done docs-known-limitations: status: todo comment: Documentation does not describe known limitations like rate limiting docs-supported-devices: todo - docs-supported-functions: - status: todo - comment: Mostly complete, but some documentation is outdated (e.g. maps/images) + docs-supported-functions: done docs-troubleshooting: status: todo comment: | @@ -83,20 +62,14 @@ rules: status: exempt comment: There are no noisy entities. entity-translations: done - exception-translations: todo + exception-translations: done icon-translations: todo reconfiguration-flow: todo repair-issues: status: todo comment: The Cloud vs Local API warning should probably be a repair issue. - stale-devices: - status: todo - comment: | - The integration does not yet handle stale devices. The roborock app does - support deleting devices and this is a gap #132590 + stale-devices: done # Platinum async-dependency: todo - inject-websession: - status: todo - comment: Web API uses aiohttp but does not yet inject web session. 
- strict-typing: todo + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/roborock/select.py b/homeassistant/components/roborock/select.py index 6133eed0652..208020dccab 100644 --- a/homeassistant/components/roborock/select.py +++ b/homeassistant/components/roborock/select.py @@ -4,9 +4,9 @@ import asyncio from collections.abc import Callable from dataclasses import dataclass -from roborock.containers import Status +from roborock.code_mappings import RoborockDockDustCollectionModeCode from roborock.roborock_message import RoborockDataProtocol -from roborock.roborock_typing import RoborockCommand +from roborock.roborock_typing import DeviceProp, RoborockCommand from homeassistant.components.select import SelectEntity, SelectEntityDescription from homeassistant.const import EntityCategory @@ -17,6 +17,8 @@ from .const import MAP_SLEEP from .coordinator import RoborockConfigEntry, RoborockDataUpdateCoordinator from .entity import RoborockCoordinatedEntityV1 +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class RoborockSelectDescription(SelectEntityDescription): @@ -25,13 +27,15 @@ class RoborockSelectDescription(SelectEntityDescription): # The command that the select entity will send to the api. api_command: RoborockCommand # Gets the current value of the select entity. - value_fn: Callable[[Status], str | None] + value_fn: Callable[[DeviceProp], str | None] # Gets all options of the select entity. - options_lambda: Callable[[Status], list[str] | None] + options_lambda: Callable[[DeviceProp], list[str] | None] # Takes the value from the select entity and converts it for the api. - parameter_lambda: Callable[[str, Status], list[int]] + parameter_lambda: Callable[[str, DeviceProp], list[int]] protocol_listener: RoborockDataProtocol | None = None + # If it is a dock entity + is_dock_entity: bool = False SELECT_DESCRIPTIONS: list[RoborockSelectDescription] = [ @@ -39,24 +43,38 @@ SELECT_DESCRIPTIONS: list[RoborockSelectDescription] = [ key="water_box_mode", translation_key="mop_intensity", api_command=RoborockCommand.SET_WATER_BOX_CUSTOM_MODE, - value_fn=lambda data: data.water_box_mode_name, + value_fn=lambda data: data.status.water_box_mode_name, entity_category=EntityCategory.CONFIG, - options_lambda=lambda data: data.water_box_mode.keys() - if data.water_box_mode is not None + options_lambda=lambda data: data.status.water_box_mode.keys() + if data.status.water_box_mode is not None else None, - parameter_lambda=lambda key, status: [status.get_mop_intensity_code(key)], + parameter_lambda=lambda key, prop: [prop.status.get_mop_intensity_code(key)], protocol_listener=RoborockDataProtocol.WATER_BOX_MODE, ), RoborockSelectDescription( key="mop_mode", translation_key="mop_mode", api_command=RoborockCommand.SET_MOP_MODE, - value_fn=lambda data: data.mop_mode_name, + value_fn=lambda data: data.status.mop_mode_name, entity_category=EntityCategory.CONFIG, - options_lambda=lambda data: data.mop_mode.keys() - if data.mop_mode is not None + options_lambda=lambda data: data.status.mop_mode.keys() + if data.status.mop_mode is not None else None, - parameter_lambda=lambda key, status: [status.get_mop_mode_code(key)], + parameter_lambda=lambda key, prop: [prop.status.get_mop_mode_code(key)], + ), + RoborockSelectDescription( + key="dust_collection_mode", + translation_key="dust_collection_mode", + api_command=RoborockCommand.SET_DUST_COLLECTION_MODE, + value_fn=lambda data: data.dust_collection_mode_name, + entity_category=EntityCategory.CONFIG, + 
options_lambda=lambda data: RoborockDockDustCollectionModeCode.keys() + if data.dust_collection_mode_name is not None + else None, + parameter_lambda=lambda key, _: [ + RoborockDockDustCollectionModeCode.as_dict().get(key) + ], + is_dock_entity=True, ), ] @@ -74,7 +92,7 @@ async def async_setup_entry( for description in SELECT_DESCRIPTIONS if ( options := description.options_lambda( - coordinator.roborock_device_info.props.status + coordinator.roborock_device_info.props ) ) is not None @@ -104,6 +122,7 @@ class RoborockSelectEntity(RoborockCoordinatedEntityV1, SelectEntity): f"{entity_description.key}_{coordinator.duid_slug}", coordinator, entity_description.protocol_listener, + is_dock_entity=entity_description.is_dock_entity, ) self._attr_options = options @@ -111,27 +130,28 @@ class RoborockSelectEntity(RoborockCoordinatedEntityV1, SelectEntity): """Set the option.""" await self.send( self.entity_description.api_command, - self.entity_description.parameter_lambda(option, self._device_status), + self.entity_description.parameter_lambda(option, self.coordinator.data), ) @property def current_option(self) -> str | None: - """Get the current status of the select entity from device_status.""" - return self.entity_description.value_fn(self._device_status) + """Get the current status of the select entity from device props.""" + return self.entity_description.value_fn(self.coordinator.data) class RoborockCurrentMapSelectEntity(RoborockCoordinatedEntityV1, SelectEntity): """A class to let you set the selected map on Roborock vacuum.""" - _attr_entity_category = EntityCategory.DIAGNOSTIC + _attr_entity_category = EntityCategory.CONFIG _attr_translation_key = "selected_map" async def async_select_option(self, option: str) -> None: """Set the option.""" for map_id, map_ in self.coordinator.maps.items(): if map_.name == option: - await self.send( + await self._send_command( RoborockCommand.LOAD_MULTI_MAP, + self.api, [map_id], ) # Update the current map id manually so that nothing gets broken @@ -140,6 +160,7 @@ class RoborockCurrentMapSelectEntity(RoborockCoordinatedEntityV1, SelectEntity): # We need to wait after updating the map # so that other commands will be executed correctly. 
await asyncio.sleep(MAP_SLEEP) + await self.coordinator.async_refresh() break @property diff --git a/homeassistant/components/roborock/sensor.py b/homeassistant/components/roborock/sensor.py index f95dc5fa98f..33ecaf74d4f 100644 --- a/homeassistant/components/roborock/sensor.py +++ b/homeassistant/components/roborock/sensor.py @@ -36,7 +36,13 @@ from .coordinator import ( RoborockDataUpdateCoordinator, RoborockDataUpdateCoordinatorA01, ) -from .entity import RoborockCoordinatedEntityA01, RoborockCoordinatedEntityV1 +from .entity import ( + RoborockCoordinatedEntityA01, + RoborockCoordinatedEntityV1, + RoborockEntity, +) + +PARALLEL_UPDATES = 0 @dataclass(frozen=True, kw_only=True) @@ -47,6 +53,9 @@ class RoborockSensorDescription(SensorEntityDescription): protocol_listener: RoborockDataProtocol | None = None + # If it is a dock entity + is_dock_entity: bool = False + @dataclass(frozen=True, kw_only=True) class RoborockSensorDescriptionA01(SensorEntityDescription): @@ -197,6 +206,7 @@ SENSOR_DESCRIPTIONS = [ entity_category=EntityCategory.DIAGNOSTIC, device_class=SensorDeviceClass.ENUM, options=RoborockDockErrorCode.keys(), + is_dock_entity=True, ), RoborockSensorDescription( key="mop_clean_remaining", @@ -205,6 +215,7 @@ SENSOR_DESCRIPTIONS = [ value_fn=lambda data: data.status.rdt, translation_key="mop_drying_remaining_time", entity_category=EntityCategory.DIAGNOSTIC, + is_dock_entity=True, ), ] @@ -299,7 +310,7 @@ async def async_setup_entry( ) -> None: """Set up the Roborock vacuum sensors.""" coordinators = config_entry.runtime_data - async_add_entities( + entities: list[RoborockEntity] = [ RoborockSensorEntity( coordinator, description, @@ -307,8 +318,9 @@ async def async_setup_entry( for coordinator in coordinators.v1 for description in SENSOR_DESCRIPTIONS if description.value_fn(coordinator.roborock_device_info.props) is not None - ) - async_add_entities( + ] + entities.extend(RoborockCurrentRoom(coordinator) for coordinator in coordinators.v1) + entities.extend( RoborockSensorEntityA01( coordinator, description, @@ -317,6 +329,7 @@ async def async_setup_entry( for description in A01_SENSOR_DESCRIPTIONS if description.data_protocol in coordinator.data ) + async_add_entities(entities) class RoborockSensorEntity(RoborockCoordinatedEntityV1, SensorEntity): @@ -335,6 +348,7 @@ class RoborockSensorEntity(RoborockCoordinatedEntityV1, SensorEntity): f"{description.key}_{coordinator.duid_slug}", coordinator, description.protocol_listener, + is_dock_entity=description.is_dock_entity, ) @property @@ -345,6 +359,42 @@ class RoborockSensorEntity(RoborockCoordinatedEntityV1, SensorEntity): ) +class RoborockCurrentRoom(RoborockCoordinatedEntityV1, SensorEntity): + """Representation of a Current Room Sensor.""" + + _attr_device_class = SensorDeviceClass.ENUM + _attr_translation_key = "current_room" + _attr_entity_category = EntityCategory.DIAGNOSTIC + + def __init__( + self, + coordinator: RoborockDataUpdateCoordinator, + ) -> None: + """Initialize the entity.""" + super().__init__( + f"current_room_{coordinator.duid_slug}", + coordinator, + None, + is_dock_entity=False, + ) + + @property + def options(self) -> list[str]: + """Return the currently valid rooms.""" + if self.coordinator.current_map is not None: + return list( + self.coordinator.maps[self.coordinator.current_map].rooms.values() + ) + return [] + + @property + def native_value(self) -> str | None: + """Return the value reported by the sensor.""" + if self.coordinator.current_map is not None: + return 
self.coordinator.maps[self.coordinator.current_map].current_room + return None + + class RoborockSensorEntityA01(RoborockCoordinatedEntityA01, SensorEntity): """Representation of a A01 Roborock sensor.""" diff --git a/homeassistant/components/roborock/strings.json b/homeassistant/components/roborock/strings.json index eb058ea74e3..caad67e4ce6 100644 --- a/homeassistant/components/roborock/strings.json +++ b/homeassistant/components/roborock/strings.json @@ -5,12 +5,18 @@ "description": "Enter your Roborock email address.", "data": { "username": "[%key:common::config_flow::data::email%]" + }, + "data_description": { + "username": "The email address used to sign in to the Roborock app." } }, "code": { "description": "Type the verification code sent to your email", "data": { "code": "Verification code" + }, + "data_description": { + "code": "The verification code sent to your email." } }, "reauth_confirm": { @@ -54,6 +60,25 @@ "vacuum_position": "Vacuum position", "virtual_walls": "Virtual walls", "zones": "Zones" + }, + "data_description": { + "charger": "Show the charger on the map.", + "cleaned_area": "Show the area cleaned on the map.", + "goto_path": "Show the go-to path on the map.", + "ignored_obstacles": "Show ignored obstacles on the map.", + "ignored_obstacles_with_photo": "Show ignored obstacles with photos on the map.", + "mop_path": "Show the mop path on the map.", + "no_carpet_zones": "Show the no carpet zones on the map.", + "no_go_zones": "Show the no-go zones on the map.", + "no_mopping_zones": "Show the no-mop zones on the map.", + "obstacles": "Show obstacles on the map.", + "obstacles_with_photo": "Show obstacles with photos on the map.", + "path": "Show the path on the map.", + "predicted_path": "Show the predicted path on the map.", + "room_names": "Show room names on the map.", + "vacuum_position": "Show the vacuum position on the map.", + "virtual_walls": "Show virtual walls on the map.", + "zones": "Show zones on the map." 
} } } @@ -156,6 +181,9 @@ "countdown": { "name": "Countdown" }, + "current_room": { + "name": "Current room" + }, "dock_error": { "name": "Dock error", "state": { @@ -353,6 +381,15 @@ }, "selected_map": { "name": "Selected map" + }, + "dust_collection_mode": { + "name": "Empty mode", + "state": { + "smart": "Smart", + "light": "Light", + "balanced": "[%key:component::roborock::entity::vacuum::roborock::state_attributes::fan_speed::state::balanced%]", + "max": "[%key:component::roborock::entity::select::mop_intensity::state::max%]" + } } }, "switch": { @@ -420,6 +457,12 @@ "map_failure": { "message": "Something went wrong creating the map" }, + "position_not_found": { + "message": "Robot position not found" + }, + "update_data_fail": { + "message": "Failed to update data" + }, "no_coordinators": { "message": "No devices were able to successfully setup" }, diff --git a/homeassistant/components/roborock/switch.py b/homeassistant/components/roborock/switch.py index 0171d59abfd..44feccdebac 100644 --- a/homeassistant/components/roborock/switch.py +++ b/homeassistant/components/roborock/switch.py @@ -24,6 +24,8 @@ from .entity import RoborockEntityV1 _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class RoborockSwitchDescription(SwitchEntityDescription): @@ -35,6 +37,8 @@ class RoborockSwitchDescription(SwitchEntityDescription): update_value: Callable[[AttributeCache, bool], Coroutine[Any, Any, None]] # Attribute from cache attribute: str + # If it is a dock entity + is_dock_entity: bool = False SWITCH_DESCRIPTIONS: list[RoborockSwitchDescription] = [ @@ -47,6 +51,7 @@ SWITCH_DESCRIPTIONS: list[RoborockSwitchDescription] = [ key="child_lock", translation_key="child_lock", entity_category=EntityCategory.CONFIG, + is_dock_entity=True, ), RoborockSwitchDescription( cache_key=CacheableAttribute.flow_led_status, @@ -57,6 +62,7 @@ SWITCH_DESCRIPTIONS: list[RoborockSwitchDescription] = [ key="status_indicator", translation_key="status_indicator", entity_category=EntityCategory.CONFIG, + is_dock_entity=True, ), RoborockSwitchDescription( cache_key=CacheableAttribute.dnd_timer, @@ -147,7 +153,13 @@ class RoborockSwitch(RoborockEntityV1, SwitchEntity): ) -> None: """Initialize the entity.""" self.entity_description = entity_description - super().__init__(unique_id, coordinator.device_info, coordinator.api) + super().__init__( + unique_id, + coordinator.device_info + if not entity_description.is_dock_entity + else coordinator.dock_device_info, + coordinator.api, + ) async def async_turn_off(self, **kwargs: Any) -> None: """Turn off the switch.""" diff --git a/homeassistant/components/roborock/time.py b/homeassistant/components/roborock/time.py index 6aa70e300e5..83d341fa2dd 100644 --- a/homeassistant/components/roborock/time.py +++ b/homeassistant/components/roborock/time.py @@ -24,6 +24,8 @@ from .entity import RoborockEntityV1 _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class RoborockTimeDescription(TimeEntityDescription): diff --git a/homeassistant/components/roborock/vacuum.py b/homeassistant/components/roborock/vacuum.py index 59abc888673..058fffbdb1c 100644 --- a/homeassistant/components/roborock/vacuum.py +++ b/homeassistant/components/roborock/vacuum.py @@ -1,11 +1,14 @@ """Support for Roborock vacuum class.""" -from dataclasses import asdict from typing import Any from roborock.code_mappings import RoborockStateCode from roborock.roborock_message import RoborockDataProtocol from 
roborock.roborock_typing import RoborockCommand +from vacuum_map_parser_base.config.color import ColorsPalette +from vacuum_map_parser_base.config.image_config import ImageConfig +from vacuum_map_parser_base.config.size import Sizes +from vacuum_map_parser_roborock.map_data_parser import RoborockMapDataParser import voluptuous as vol from homeassistant.components.vacuum import ( @@ -26,7 +29,6 @@ from .const import ( ) from .coordinator import RoborockConfigEntry, RoborockDataUpdateCoordinator from .entity import RoborockCoordinatedEntityV1 -from .image import ColorsPalette, ImageConfig, RoborockMapDataParser, Sizes STATE_CODE_TO_STATE = { RoborockStateCode.starting: VacuumActivity.IDLE, # "Starting" @@ -54,6 +56,8 @@ STATE_CODE_TO_STATE = { RoborockStateCode.device_offline: VacuumActivity.ERROR, # "Device offline" } +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, @@ -201,7 +205,14 @@ class RoborockVacuum(RoborockCoordinatedEntityV1, StateVacuumEntity): """Get map information such as map id and room ids.""" return { "maps": [ - asdict(vacuum_map) for vacuum_map in self.coordinator.maps.values() + { + "flag": vacuum_map.flag, + "name": vacuum_map.name, + # JsonValueType does not accept a int as a key - was not a + # issue with previous asdict() implementation. + "rooms": vacuum_map.rooms, # type: ignore[dict-item] + } + for vacuum_map in self.coordinator.maps.values() ] } @@ -210,13 +221,18 @@ class RoborockVacuum(RoborockCoordinatedEntityV1, StateVacuumEntity): map_data = await self.coordinator.cloud_api.get_map_v1() if not isinstance(map_data, bytes): - raise HomeAssistantError("Failed to retrieve map data.") + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="map_failure", + ) parser = RoborockMapDataParser(ColorsPalette(), Sizes(), [], ImageConfig(), []) parsed_map = parser.parse(map_data) robot_position = parsed_map.vacuum_position if robot_position is None: - raise HomeAssistantError("Robot position not found") + raise HomeAssistantError( + translation_domain=DOMAIN, translation_key="position_not_found" + ) return { "x": robot_position.x, diff --git a/homeassistant/components/roku/strings.json b/homeassistant/components/roku/strings.json index 04348bc3bfb..62f1f8b1736 100644 --- a/homeassistant/components/roku/strings.json +++ b/homeassistant/components/roku/strings.json @@ -47,7 +47,7 @@ "name": "Supports AirPlay" }, "supports_ethernet": { - "name": "Supports ethernet" + "name": "Supports Ethernet" }, "supports_find_remote": { "name": "Supports find remote" diff --git a/homeassistant/components/rova/strings.json b/homeassistant/components/rova/strings.json index 3b89fc789ee..21f4146bf78 100644 --- a/homeassistant/components/rova/strings.json +++ b/homeassistant/components/rova/strings.json @@ -4,7 +4,7 @@ "user": { "title": "Provide your address details", "data": { - "zip_code": "Your zip code", + "zip_code": "Your ZIP code", "house_number": "Your house number", "house_number_suffix": "A suffix for your house number" } diff --git a/homeassistant/components/schedule/strings.json b/homeassistant/components/schedule/strings.json index 8638e4a8a84..bb81c029dbf 100644 --- a/homeassistant/components/schedule/strings.json +++ b/homeassistant/components/schedule/strings.json @@ -28,7 +28,7 @@ }, "get_schedule": { "name": "Get schedule", - "description": "Retrieve one or multiple schedules." + "description": "Retrieves the configured time ranges of one or multiple schedules." 
} } } diff --git a/homeassistant/components/search/manifest.json b/homeassistant/components/search/manifest.json index cd372139451..42a54fe8b55 100644 --- a/homeassistant/components/search/manifest.json +++ b/homeassistant/components/search/manifest.json @@ -1,7 +1,6 @@ { "domain": "search", "name": "Search", - "after_dependencies": ["scene", "group", "automation", "script"], "codeowners": ["@home-assistant/core"], "dependencies": ["websocket_api"], "documentation": "https://www.home-assistant.io/integrations/search", diff --git a/homeassistant/components/sensibo/manifest.json b/homeassistant/components/sensibo/manifest.json index e6398c5076e..610695aaf7b 100644 --- a/homeassistant/components/sensibo/manifest.json +++ b/homeassistant/components/sensibo/manifest.json @@ -14,5 +14,6 @@ }, "iot_class": "cloud_polling", "loggers": ["pysensibo"], + "quality_scale": "platinum", "requirements": ["pysensibo==1.1.0"] } diff --git a/homeassistant/components/sensibo/quality_scale.yaml b/homeassistant/components/sensibo/quality_scale.yaml index c21cf100e9d..3d71d0ad3ba 100644 --- a/homeassistant/components/sensibo/quality_scale.yaml +++ b/homeassistant/components/sensibo/quality_scale.yaml @@ -19,9 +19,9 @@ rules: comment: | No integrations services. common-modules: done - docs-high-level-description: todo + docs-high-level-description: done docs-installation-instructions: done - docs-removal-instructions: todo + docs-removal-instructions: done docs-actions: done brands: done # Silver @@ -39,9 +39,7 @@ rules: comment: | Tests are very complex and needs a rewrite for future additions integration-owner: done - docs-installation-parameters: - status: todo - comment: configuration_basic + docs-installation-parameters: done docs-configuration-parameters: status: exempt comment: | @@ -71,13 +69,13 @@ rules: status: exempt comment: | This integration doesn't have any cases where raising an issue is needed. - docs-use-cases: todo - docs-supported-devices: todo - docs-supported-functions: todo - docs-data-update: todo - docs-known-limitations: todo - docs-troubleshooting: todo - docs-examples: todo + docs-use-cases: done + docs-supported-devices: done + docs-supported-functions: done + docs-data-update: done + docs-known-limitations: done + docs-troubleshooting: done + docs-examples: done # Platinum async-dependency: done diff --git a/homeassistant/components/sensibo/strings.json b/homeassistant/components/sensibo/strings.json index 6aba2be52fc..0fbcda461c8 100644 --- a/homeassistant/components/sensibo/strings.json +++ b/homeassistant/components/sensibo/strings.json @@ -330,7 +330,7 @@ "timer_on_switch": { "name": "Timer", "state_attributes": { - "id": { "name": "Id" }, + "id": { "name": "ID" }, "turn_on": { "name": "Turns on", "state": { @@ -594,7 +594,7 @@ "issues": { "deprecated_entity_horizontalswing": { "title": "The Sensibo {name} entity is deprecated", - "description": "The Sensibo entity `{entity}` is deprecated and will be removed in a future release.\nPlease update your automations and scripts to use the `horizontal_swing` attribute part of the `climate` entity instead.\n, Disable the `{entity}` and reload the config entry or restart Home Assistant to fix this issue." + "description": "The Sensibo entity `{entity}` is deprecated and will be removed in a future release.\nPlease update your automations and scripts to use the `horizontal_swing` attribute part of the `climate` entity instead.\nDisable `{entity}` and reload the config entry or restart Home Assistant to fix this issue." 
} } } diff --git a/homeassistant/components/sensor/__init__.py b/homeassistant/components/sensor/__init__.py index e3ee566a855..e06ee85cd03 100644 --- a/homeassistant/components/sensor/__init__.py +++ b/homeassistant/components/sensor/__init__.py @@ -44,6 +44,7 @@ from .const import ( # noqa: F401 DEVICE_CLASSES_SCHEMA, DOMAIN, NON_NUMERIC_DEVICE_CLASSES, + STATE_CLASS_UNITS, STATE_CLASSES, STATE_CLASSES_SCHEMA, UNIT_CONVERTERS, @@ -713,6 +714,18 @@ class SensorEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): report_issue, ) + # Validate unit of measurement used for sensors with a state class + if ( + state_class + and (units := STATE_CLASS_UNITS.get(state_class)) is not None + and native_unit_of_measurement not in units + ): + raise ValueError( + f"Sensor {self.entity_id} ({type(self)}) is using native unit of " + f"measurement '{native_unit_of_measurement}' which is not a valid unit " + f"for the state class ('{state_class}') it is using; expected one of {units};" + ) + return value def _display_precision_or_none(self) -> int | None: diff --git a/homeassistant/components/sensor/const.py b/homeassistant/components/sensor/const.py index 8eccb758756..63af8e5bf52 100644 --- a/homeassistant/components/sensor/const.py +++ b/homeassistant/components/sensor/const.py @@ -186,7 +186,7 @@ class SensorDeviceClass(StrEnum): DURATION = "duration" """Fixed duration. - Unit of measurement: `d`, `h`, `min`, `s`, `ms` + Unit of measurement: `d`, `h`, `min`, `s`, `ms`, `µs` """ ENERGY = "energy" @@ -491,6 +491,9 @@ class SensorStateClass(StrEnum): MEASUREMENT = "measurement" """The state represents a measurement in present time.""" + MEASUREMENT_ANGLE = "measurement_angle" + """The state represents an angle measurement in present time. Currently only degrees are supported.""" + TOTAL = "total" """The state represents a total amount.
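[Editor's note, not part of the patch] The sensor changes above introduce SensorStateClass.MEASUREMENT_ANGLE together with a STATE_CLASS_UNITS check that accepts only degrees for that state class; the recorder changes later in this diff then compile a time-weighted circular mean for such sensors instead of an arithmetic mean. Below is a minimal sketch of how an integration might use this, with a standalone circular-mean helper for illustration — the class name ExampleWindDirectionSensor and the helper function are hypothetical, and only the Home Assistant imports shown (plus the standard math module) are assumed.

import math

from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntity,
    SensorStateClass,
)
from homeassistant.const import DEGREE


class ExampleWindDirectionSensor(SensorEntity):
    """Hypothetical wind direction sensor opting into angle statistics."""

    _attr_device_class = SensorDeviceClass.WIND_DIRECTION
    _attr_state_class = SensorStateClass.MEASUREMENT_ANGLE
    # Any other native unit would now fail the STATE_CLASS_UNITS check added
    # in sensor/__init__.py above and raise ValueError when the state is written.
    _attr_native_unit_of_measurement = DEGREE


def weighted_circular_mean(values: list[tuple[float, float]]) -> float:
    """Return the weighted circular mean of (angle in degrees, weight) pairs."""
    sin_sum = sum(math.sin(math.radians(angle)) * weight for angle, weight in values)
    cos_sum = sum(math.cos(math.radians(angle)) * weight for angle, weight in values)
    return math.degrees(math.atan2(sin_sum, cos_sum)) % 360


# 350° held for 30 s and 20° held for 30 s average to 5°, while an arithmetic
# mean of the same readings would give 185° — the reason MEASUREMENT_ANGLE
# statistics use a circular mean.
print(round(weighted_circular_mean([(350.0, 30.0), (20.0, 30.0)]), 1))  # 5.0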
@@ -558,6 +561,7 @@ DEVICE_CLASS_UNITS: dict[SensorDeviceClass, set[type[StrEnum] | str | None]] = { UnitOfTime.MINUTES, UnitOfTime.SECONDS, UnitOfTime.MILLISECONDS, + UnitOfTime.MICROSECONDS, }, SensorDeviceClass.ENERGY: set(UnitOfEnergy), SensorDeviceClass.ENERGY_DISTANCE: set(UnitOfEnergyDistance), @@ -582,6 +586,7 @@ DEVICE_CLASS_UNITS: dict[SensorDeviceClass, set[type[StrEnum] | str | None]] = { SensorDeviceClass.PM25: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER}, SensorDeviceClass.POWER_FACTOR: {PERCENTAGE, None}, SensorDeviceClass.POWER: { + UnitOfPower.MILLIWATT, UnitOfPower.WATT, UnitOfPower.KILO_WATT, UnitOfPower.MEGA_WATT, @@ -691,6 +696,11 @@ DEVICE_CLASS_STATE_CLASSES: dict[SensorDeviceClass, set[SensorStateClass]] = { SensorStateClass.TOTAL, SensorStateClass.TOTAL_INCREASING, }, - SensorDeviceClass.WIND_DIRECTION: set(), + SensorDeviceClass.WIND_DIRECTION: {SensorStateClass.MEASUREMENT_ANGLE}, SensorDeviceClass.WIND_SPEED: {SensorStateClass.MEASUREMENT}, } + + +STATE_CLASS_UNITS: dict[SensorStateClass | str, set[type[StrEnum] | str | None]] = { + SensorStateClass.MEASUREMENT_ANGLE: {DEGREE}, +} diff --git a/homeassistant/components/sensor/recorder.py b/homeassistant/components/sensor/recorder.py index 675d24b9240..cb80fa7d2ce 100644 --- a/homeassistant/components/sensor/recorder.py +++ b/homeassistant/components/sensor/recorder.py @@ -5,6 +5,7 @@ from __future__ import annotations from collections import defaultdict from collections.abc import Callable, Iterable from contextlib import suppress +from dataclasses import dataclass import datetime import itertools import logging @@ -21,6 +22,7 @@ from homeassistant.components.recorder import ( ) from homeassistant.components.recorder.models import ( StatisticData, + StatisticMeanType, StatisticMetaData, StatisticResult, ) @@ -52,10 +54,22 @@ from .const import ( _LOGGER = logging.getLogger(__name__) + +@dataclass +class _StatisticsConfig: + types: set[str] + mean_type: StatisticMeanType = StatisticMeanType.NONE + + DEFAULT_STATISTICS = { - SensorStateClass.MEASUREMENT: {"mean", "min", "max"}, - SensorStateClass.TOTAL: {"sum"}, - SensorStateClass.TOTAL_INCREASING: {"sum"}, + SensorStateClass.MEASUREMENT: _StatisticsConfig( + {"mean", "min", "max"}, StatisticMeanType.ARITHMETIC + ), + SensorStateClass.MEASUREMENT_ANGLE: _StatisticsConfig( + {"mean"}, StatisticMeanType.CIRCULAR + ), + SensorStateClass.TOTAL: _StatisticsConfig({"sum"}), + SensorStateClass.TOTAL_INCREASING: _StatisticsConfig({"sum"}), } EQUIVALENT_UNITS = { @@ -76,8 +90,15 @@ WARN_NEGATIVE: HassKey[set[str]] = HassKey(f"{DOMAIN}_warn_total_increasing_nega # Keep track of entities for which a warning about unsupported unit has been logged WARN_UNSUPPORTED_UNIT: HassKey[set[str]] = HassKey(f"{DOMAIN}_warn_unsupported_unit") WARN_UNSTABLE_UNIT: HassKey[set[str]] = HassKey(f"{DOMAIN}_warn_unstable_unit") +# Keep track of entities for which a warning about statistics mean algorithm change has been logged +WARN_STATISTICS_MEAN_CHANGED: HassKey[set[str]] = HassKey( + f"{DOMAIN}_warn_statistics_mean_change" +) # Link to dev statistics where issues around LTS can be fixed LINK_DEV_STATISTICS = "https://my.home-assistant.io/redirect/developer_statistics" +STATE_CLASS_REMOVED_ISSUE = "state_class_removed" +UNITS_CHANGED_ISSUE = "units_changed" +MEAN_TYPE_CHANGED_ISSUE = "mean_type_changed" def _get_sensor_states(hass: HomeAssistant) -> list[State]: @@ -99,7 +120,7 @@ def _get_sensor_states(hass: HomeAssistant) -> list[State]: ] -def _time_weighted_average( +def 
_time_weighted_arithmetic_mean( fstates: list[tuple[float, State]], start: datetime.datetime, end: datetime.datetime ) -> float: """Calculate a time weighted average. @@ -134,16 +155,44 @@ def _time_weighted_average( duration = end - old_start_time accumulated += old_fstate * duration.total_seconds() - period_seconds = (end - start).total_seconds() - if period_seconds == 0: - # If the only state changed that happened was at the exact moment - # at the end of the period, we can't calculate a meaningful average - # so we return 0.0 since it represents a time duration smaller than - # we can measure. This probably means the precision of statistics - # column schema in the database is incorrect but it is actually possible - # to happen if the state change event fired at the exact microsecond - return 0.0 - return accumulated / period_seconds + return accumulated / (end - start).total_seconds() + + +def _time_weighted_circular_mean( + fstates: list[tuple[float, State]], start: datetime.datetime, end: datetime.datetime +) -> float: + """Calculate a time weighted circular mean. + + The circular mean is calculated by weighting the states by duration in seconds between + state changes. + Note: there's no interpolation of values between state changes. + """ + old_fstate: float | None = None + old_start_time: datetime.datetime | None = None + values: list[tuple[float, float]] = [] + + for fstate, state in fstates: + # The recorder will give us the last known state, which may be well + # before the requested start time for the statistics + start_time = max(state.last_updated, start) + if old_start_time is None: + # Adjust start time, if there was no last known state + start = start_time + else: + duration = (start_time - old_start_time).total_seconds() + assert old_fstate is not None + values.append((old_fstate, duration)) + + old_fstate = fstate + old_start_time = start_time + + if old_fstate is not None: + # Add last value weighted by duration until end of the period + assert old_start_time is not None + duration = (end - old_start_time).total_seconds() + values.append((old_fstate, duration)) + + return statistics.weighted_circular_mean(values) def _get_units(fstates: list[tuple[float, State]]) -> set[str | None]: @@ -371,7 +420,7 @@ def reset_detected( return fstate < 0.9 * previous_fstate -def _wanted_statistics(sensor_states: list[State]) -> dict[str, set[str]]: +def _wanted_statistics(sensor_states: list[State]) -> dict[str, _StatisticsConfig]: """Prepare a dict with wanted statistics for entities.""" return { state.entity_id: DEFAULT_STATISTICS[state.attributes[ATTR_STATE_CLASS]] @@ -415,7 +464,9 @@ def compile_statistics( # noqa: C901 wanted_statistics = _wanted_statistics(sensor_states) # Get history between start and end entities_full_history = [ - i.entity_id for i in sensor_states if "sum" in wanted_statistics[i.entity_id] + i.entity_id + for i in sensor_states + if "sum" in wanted_statistics[i.entity_id].types ] history_list: dict[str, list[State]] = {} if entities_full_history: @@ -430,7 +481,7 @@ def compile_statistics( # noqa: C901 entities_significant_history = [ i.entity_id for i in sensor_states - if "sum" not in wanted_statistics[i.entity_id] + if "sum" not in wanted_statistics[i.entity_id].types ] if entities_significant_history: _history_list = history.get_full_significant_states_with_session( @@ -447,7 +498,11 @@ def compile_statistics( # noqa: C901 entity_id = _state.entity_id # If there are no recent state changes, the sensor's state may already be pruned # from the recorder. 
Get the state from the state machine instead. - if not (entity_history := history_list.get(entity_id, [_state])): + try: + entity_history = history_list[entity_id] + except KeyError: + entity_history = [_state] if _state.last_changed < end else [] + if not entity_history: continue if not (float_states := _entity_history_to_float_and_state(entity_history)): continue @@ -476,7 +531,7 @@ def compile_statistics( # noqa: C901 continue state_class: str = _state.attributes[ATTR_STATE_CLASS] to_process.append((entity_id, statistics_unit, state_class, valid_float_states)) - if "sum" in wanted_statistics[entity_id]: + if "sum" in wanted_statistics[entity_id].types: to_query.add(entity_id) last_stats = statistics.get_latest_short_term_statistics_with_session( @@ -488,6 +543,10 @@ def compile_statistics( # noqa: C901 state_class, valid_float_states, ) in to_process: + mean_type = StatisticMeanType.NONE + if "mean" in wanted_statistics[entity_id].types: + mean_type = wanted_statistics[entity_id].mean_type + # Check metadata if old_metadata := old_metadatas.get(entity_id): if not _equivalent_units( @@ -513,10 +572,34 @@ def compile_statistics( # noqa: C901 ) continue + if ( + mean_type is not StatisticMeanType.NONE + and (old_mean_type := old_metadata[1]["mean_type"]) + is not StatisticMeanType.NONE + and mean_type != old_mean_type + ): + if WARN_STATISTICS_MEAN_CHANGED not in hass.data: + hass.data[WARN_STATISTICS_MEAN_CHANGED] = set() + if entity_id not in hass.data[WARN_STATISTICS_MEAN_CHANGED]: + hass.data[WARN_STATISTICS_MEAN_CHANGED].add(entity_id) + _LOGGER.warning( + ( + "The statistics mean algorithm for %s have changed from %s to %s." + " Generation of long term statistics will be suppressed" + " unless it changes back or go to %s to delete the old" + " statistics" + ), + entity_id, + old_mean_type.name, + mean_type.name, + LINK_DEV_STATISTICS, + ) + continue + # Set meta data meta: StatisticMetaData = { - "has_mean": "mean" in wanted_statistics[entity_id], - "has_sum": "sum" in wanted_statistics[entity_id], + "mean_type": mean_type, + "has_sum": "sum" in wanted_statistics[entity_id].types, "name": None, "source": RECORDER_DOMAIN, "statistic_id": entity_id, @@ -525,19 +608,26 @@ def compile_statistics( # noqa: C901 # Make calculations stat: StatisticData = {"start": start} - if "max" in wanted_statistics[entity_id]: + if "max" in wanted_statistics[entity_id].types: stat["max"] = max( *itertools.islice(zip(*valid_float_states, strict=False), 1) ) - if "min" in wanted_statistics[entity_id]: + if "min" in wanted_statistics[entity_id].types: stat["min"] = min( *itertools.islice(zip(*valid_float_states, strict=False), 1) ) - if "mean" in wanted_statistics[entity_id]: - stat["mean"] = _time_weighted_average(valid_float_states, start, end) + match mean_type: + case StatisticMeanType.ARITHMETIC: + stat["mean"] = _time_weighted_arithmetic_mean( + valid_float_states, start, end + ) + case StatisticMeanType.CIRCULAR: + stat["mean"] = _time_weighted_circular_mean( + valid_float_states, start, end + ) - if "sum" in wanted_statistics[entity_id]: + if "sum" in wanted_statistics[entity_id].types: last_reset = old_last_reset = None new_state = old_state = None _sum = 0.0 @@ -661,18 +751,25 @@ def list_statistic_ids( attributes = state.attributes state_class = attributes[ATTR_STATE_CLASS] provided_statistics = DEFAULT_STATISTICS[state_class] - if statistic_type is not None and statistic_type not in provided_statistics: + if ( + statistic_type is not None + and statistic_type not in provided_statistics.types + 
): continue if ( - (has_sum := "sum" in provided_statistics) + (has_sum := "sum" in provided_statistics.types) and ATTR_LAST_RESET not in attributes and state_class == SensorStateClass.MEASUREMENT ): continue + mean_type = StatisticMeanType.NONE + if "mean" in provided_statistics.types: + mean_type = provided_statistics.mean_type + result[entity_id] = { - "has_mean": "mean" in provided_statistics, + "mean_type": mean_type, "has_sum": has_sum, "name": None, "source": RECORDER_DOMAIN, @@ -702,7 +799,7 @@ def _update_issues( if numeric and state_class is None: # Sensor no longer has a valid state class report_issue( - "state_class_removed", + STATE_CLASS_REMOVED_ISSUE, entity_id, {"statistic_id": entity_id}, ) @@ -713,7 +810,7 @@ def _update_issues( if numeric and not _equivalent_units({state_unit, metadata_unit}): # The unit has changed, and it's not possible to convert report_issue( - "units_changed", + UNITS_CHANGED_ISSUE, entity_id, { "statistic_id": entity_id, @@ -727,7 +824,7 @@ def _update_issues( valid_units = (unit or "" for unit in converter.VALID_UNITS) valid_units_str = ", ".join(sorted(valid_units)) report_issue( - "units_changed", + UNITS_CHANGED_ISSUE, entity_id, { "statistic_id": entity_id, @@ -737,6 +834,23 @@ def _update_issues( }, ) + if ( + (metadata_mean_type := metadata[1]["mean_type"]) is not None + and state_class + and (state_mean_type := DEFAULT_STATISTICS[state_class].mean_type) + != metadata_mean_type + ): + # The mean type has changed and the old statistics are not valid anymore + report_issue( + MEAN_TYPE_CHANGED_ISSUE, + entity_id, + { + "statistic_id": entity_id, + "metadata_mean_type": metadata_mean_type, + "state_mean_type": state_mean_type, + }, + ) + def update_statistics_issues( hass: HomeAssistant, @@ -759,7 +873,11 @@ def update_statistics_issues( issue.domain != DOMAIN or not (issue_data := issue.data) or issue_data.get("issue_type") - not in ("state_class_removed", "units_changed") + not in ( + STATE_CLASS_REMOVED_ISSUE, + UNITS_CHANGED_ISSUE, + MEAN_TYPE_CHANGED_ISSUE, + ) ): continue issues.add(issue.issue_id) diff --git a/homeassistant/components/sensor/strings.json b/homeassistant/components/sensor/strings.json index ae414a178e9..fe6684a9ca4 100644 --- a/homeassistant/components/sensor/strings.json +++ b/homeassistant/components/sensor/strings.json @@ -309,6 +309,10 @@ } }, "issues": { + "mean_type_changed": { + "title": "The mean type of {statistic_id} has changed", + "description": "" + }, "state_class_removed": { "title": "{statistic_id} no longer has a state class", "description": "" diff --git a/homeassistant/components/sentry/strings.json b/homeassistant/components/sentry/strings.json index efcdb631f3c..22f7b355e0e 100644 --- a/homeassistant/components/sentry/strings.json +++ b/homeassistant/components/sentry/strings.json @@ -24,7 +24,7 @@ "event_handled": "Send handled events", "event_third_party_packages": "Send events from third-party packages", "logging_event_level": "The log level Sentry will register an event for", - "logging_level": "The log level Sentry will record logs as breadcrums for", + "logging_level": "The log level Sentry will record events as breadcrumbs for", "tracing": "Enable performance tracing", "tracing_sample_rate": "Tracing sample rate; between 0.0 and 1.0 (1.0 = 100%)" } diff --git a/homeassistant/components/shelly/__init__.py b/homeassistant/components/shelly/__init__.py index 7440013940c..ee28c41f18b 100644 --- a/homeassistant/components/shelly/__init__.py +++ b/homeassistant/components/shelly/__init__.py @@ -4,6 
+4,7 @@ from __future__ import annotations from typing import Final +from aioshelly.ble.const import BLE_SCRIPT_NAME from aioshelly.block_device import BlockDevice from aioshelly.common import ConnectionOptions from aioshelly.const import DEFAULT_COAP_PORT, RPC_GENERATIONS @@ -11,6 +12,7 @@ from aioshelly.exceptions import ( DeviceConnectionError, InvalidAuthError, MacAddressMismatchError, + RpcCallError, ) from aioshelly.rpc_device import RpcDevice, bluetooth_mac_from_primary_mac import voluptuous as vol @@ -59,6 +61,7 @@ from .utils import ( get_coap_context, get_device_entry_gen, get_http_port, + get_rpc_scripts_event_types, get_ws_context, ) @@ -108,6 +111,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ShellyConfigEntry) -> bool: """Set up Shelly from a config entry.""" + entry.runtime_data = ShellyEntryData([]) + # The custom component for Shelly devices uses shelly domain as well as core # integration. If the user removes the custom component but doesn't remove the # config entry, core integration will try to configure that config entry with an @@ -159,7 +164,8 @@ async def _async_setup_block_entry( device_entry = None sleep_period = entry.data.get(CONF_SLEEP_PERIOD) - runtime_data = entry.runtime_data = ShellyEntryData(BLOCK_SLEEPING_PLATFORMS) + runtime_data = entry.runtime_data + runtime_data.platforms = BLOCK_SLEEPING_PLATFORMS # Some old firmware have a wrong sleep period hardcoded value. # Following code block will force the right value for affected devices @@ -186,13 +192,25 @@ async def _async_setup_block_entry( if not device.firmware_supported: async_create_issue_unsupported_firmware(hass, entry) await device.shutdown() - raise ConfigEntryNotReady + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="firmware_unsupported", + translation_placeholders={"device": entry.title}, + ) except (DeviceConnectionError, MacAddressMismatchError) as err: await device.shutdown() - raise ConfigEntryNotReady(repr(err)) from err + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="device_communication_error", + translation_placeholders={"device": entry.title}, + ) from err except InvalidAuthError as err: await device.shutdown() - raise ConfigEntryAuthFailed(repr(err)) from err + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="auth_error", + translation_placeholders={"device": entry.title}, + ) from err runtime_data.block = ShellyBlockCoordinator(hass, entry, device) runtime_data.block.async_setup() @@ -258,7 +276,8 @@ async def _async_setup_rpc_entry(hass: HomeAssistant, entry: ShellyConfigEntry) device_entry = None sleep_period = entry.data.get(CONF_SLEEP_PERIOD) - runtime_data = entry.runtime_data = ShellyEntryData(RPC_SLEEPING_PLATFORMS) + runtime_data = entry.runtime_data + runtime_data.platforms = RPC_SLEEPING_PLATFORMS if sleep_period == 0: # Not a sleeping device, finish setup @@ -269,13 +288,30 @@ async def _async_setup_rpc_entry(hass: HomeAssistant, entry: ShellyConfigEntry) if not device.firmware_supported: async_create_issue_unsupported_firmware(hass, entry) await device.shutdown() - raise ConfigEntryNotReady - except (DeviceConnectionError, MacAddressMismatchError) as err: + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="firmware_unsupported", + translation_placeholders={"device": entry.title}, + ) + runtime_data.rpc_supports_scripts = await device.supports_scripts() + if 
runtime_data.rpc_supports_scripts: + runtime_data.rpc_script_events = await get_rpc_scripts_event_types( + device, ignore_scripts=[BLE_SCRIPT_NAME] + ) + except (DeviceConnectionError, MacAddressMismatchError, RpcCallError) as err: await device.shutdown() - raise ConfigEntryNotReady(repr(err)) from err + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="device_communication_error", + translation_placeholders={"device": entry.title}, + ) from err except InvalidAuthError as err: await device.shutdown() - raise ConfigEntryAuthFailed(repr(err)) from err + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="auth_error", + translation_placeholders={"device": entry.title}, + ) from err runtime_data.rpc = ShellyRpcCoordinator(hass, entry, device) runtime_data.rpc.async_setup() diff --git a/homeassistant/components/shelly/binary_sensor.py b/homeassistant/components/shelly/binary_sensor.py index ed2ac68d264..b74578f1fb3 100644 --- a/homeassistant/components/shelly/binary_sensor.py +++ b/homeassistant/components/shelly/binary_sensor.py @@ -130,6 +130,7 @@ SENSORS: dict[tuple[str, str], BlockBinarySensorDescription] = { device_class=BinarySensorDeviceClass.GAS, translation_key="gas", value=lambda value: value in ["mild", "heavy"], + # Deprecated, remove in 2025.10 extra_state_attributes=lambda block: {"detected": block.gas}, ), ("sensor", "smoke"): BlockBinarySensorDescription( diff --git a/homeassistant/components/shelly/bluetooth/__init__.py b/homeassistant/components/shelly/bluetooth/__init__.py index cad1b9f044d..2b772bd1b78 100644 --- a/homeassistant/components/shelly/bluetooth/__init__.py +++ b/homeassistant/components/shelly/bluetooth/__init__.py @@ -7,7 +7,10 @@ from typing import TYPE_CHECKING from aioshelly.ble import async_start_scanner, create_scanner from aioshelly.ble.const import BLE_SCAN_RESULT_EVENT, BLE_SCAN_RESULT_VERSION -from homeassistant.components.bluetooth import async_register_scanner +from homeassistant.components.bluetooth import ( + BluetoothScanningMode, + async_register_scanner, +) from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback as hass_callback from ..const import BLEScannerMode @@ -15,6 +18,11 @@ from ..const import BLEScannerMode if TYPE_CHECKING: from ..coordinator import ShellyRpcCoordinator +BLE_SCANNER_MODE_TO_BLUETOOTH_SCANNING_MODE = { + BLEScannerMode.PASSIVE: BluetoothScanningMode.PASSIVE, + BLEScannerMode.ACTIVE: BluetoothScanningMode.ACTIVE, +} + async def async_connect_scanner( hass: HomeAssistant, @@ -25,7 +33,13 @@ async def async_connect_scanner( """Connect scanner.""" device = coordinator.device entry = coordinator.config_entry - scanner = create_scanner(coordinator.bluetooth_source, entry.title) + bluetooth_scanning_mode = BLE_SCANNER_MODE_TO_BLUETOOTH_SCANNING_MODE[scanner_mode] + scanner = create_scanner( + coordinator.bluetooth_source, + entry.title, + requested_mode=bluetooth_scanning_mode, + current_mode=bluetooth_scanning_mode, + ) unload_callbacks = [ async_register_scanner( hass, diff --git a/homeassistant/components/shelly/button.py b/homeassistant/components/shelly/button.py index 1f3c555a64b..06dffba5ead 100644 --- a/homeassistant/components/shelly/button.py +++ b/homeassistant/components/shelly/button.py @@ -2,12 +2,13 @@ from __future__ import annotations -from collections.abc import Callable, Coroutine +from collections.abc import Callable from dataclasses import dataclass from functools import partial from typing import TYPE_CHECKING, Any, Final -from aioshelly.const 
import RPC_GENERATIONS +from aioshelly.const import BLU_TRV_IDENTIFIER, MODEL_BLU_GATEWAY, RPC_GENERATIONS +from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError, RpcCallError from homeassistant.components.button import ( ButtonDeviceClass, @@ -16,15 +17,20 @@ from homeassistant.components.button import ( ) from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo +from homeassistant.helpers.device_registry import ( + CONNECTION_BLUETOOTH, + CONNECTION_NETWORK_MAC, + DeviceInfo, +) from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from homeassistant.util import slugify -from .const import LOGGER, SHELLY_GAS_MODELS +from .const import DOMAIN, LOGGER, SHELLY_GAS_MODELS from .coordinator import ShellyBlockCoordinator, ShellyConfigEntry, ShellyRpcCoordinator -from .utils import get_device_entry_gen +from .utils import get_device_entry_gen, get_rpc_key_ids @dataclass(frozen=True, kw_only=True) @@ -33,7 +39,7 @@ class ShellyButtonDescription[ ](ButtonEntityDescription): """Class to describe a Button entity.""" - press_action: Callable[[_ShellyCoordinatorT], Coroutine[Any, Any, None]] + press_action: str supported: Callable[[_ShellyCoordinatorT], bool] = lambda _: True @@ -44,14 +50,14 @@ BUTTONS: Final[list[ShellyButtonDescription[Any]]] = [ name="Reboot", device_class=ButtonDeviceClass.RESTART, entity_category=EntityCategory.CONFIG, - press_action=lambda coordinator: coordinator.device.trigger_reboot(), + press_action="trigger_reboot", ), ShellyButtonDescription[ShellyBlockCoordinator]( key="self_test", name="Self test", translation_key="self_test", entity_category=EntityCategory.DIAGNOSTIC, - press_action=lambda coordinator: coordinator.device.trigger_shelly_gas_self_test(), + press_action="trigger_shelly_gas_self_test", supported=lambda coordinator: coordinator.device.model in SHELLY_GAS_MODELS, ), ShellyButtonDescription[ShellyBlockCoordinator]( @@ -59,7 +65,7 @@ BUTTONS: Final[list[ShellyButtonDescription[Any]]] = [ name="Mute", translation_key="mute", entity_category=EntityCategory.CONFIG, - press_action=lambda coordinator: coordinator.device.trigger_shelly_gas_mute(), + press_action="trigger_shelly_gas_mute", supported=lambda coordinator: coordinator.device.model in SHELLY_GAS_MODELS, ), ShellyButtonDescription[ShellyBlockCoordinator]( @@ -67,11 +73,22 @@ BUTTONS: Final[list[ShellyButtonDescription[Any]]] = [ name="Unmute", translation_key="unmute", entity_category=EntityCategory.CONFIG, - press_action=lambda coordinator: coordinator.device.trigger_shelly_gas_unmute(), + press_action="trigger_shelly_gas_unmute", supported=lambda coordinator: coordinator.device.model in SHELLY_GAS_MODELS, ), ] +BLU_TRV_BUTTONS: Final[list[ShellyButtonDescription]] = [ + ShellyButtonDescription[ShellyRpcCoordinator]( + key="calibrate", + name="Calibrate", + translation_key="calibrate", + entity_category=EntityCategory.CONFIG, + press_action="trigger_blu_trv_calibration", + supported=lambda coordinator: coordinator.device.model == MODEL_BLU_GATEWAY, + ), +] + @callback def async_migrate_unique_ids( @@ -123,14 +140,28 @@ async def async_setup_entry( hass, config_entry.entry_id, partial(async_migrate_unique_ids, coordinator) ) - async_add_entities( + 
entities: list[ShellyButton | ShellyBluTrvButton] = [] + + entities.extend( ShellyButton(coordinator, button) for button in BUTTONS if button.supported(coordinator) ) + if blutrv_key_ids := get_rpc_key_ids(coordinator.device.status, BLU_TRV_IDENTIFIER): + if TYPE_CHECKING: + assert isinstance(coordinator, ShellyRpcCoordinator) -class ShellyButton( + entities.extend( + ShellyBluTrvButton(coordinator, button, id_) + for id_ in blutrv_key_ids + for button in BLU_TRV_BUTTONS + ) + + async_add_entities(entities) + + +class ShellyBaseButton( CoordinatorEntity[ShellyRpcCoordinator | ShellyBlockCoordinator], ButtonEntity ): """Defines a Shelly base button.""" @@ -148,14 +179,98 @@ class ShellyButton( ) -> None: """Initialize Shelly button.""" super().__init__(coordinator) + self.entity_description = description + async def async_press(self) -> None: + """Triggers the Shelly button press service.""" + try: + await self._press_method() + except DeviceConnectionError as err: + self.coordinator.last_update_success = False + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="device_communication_action_error", + translation_placeholders={ + "entity": self.entity_id, + "device": self.coordinator.name, + }, + ) from err + except RpcCallError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="rpc_call_action_error", + translation_placeholders={ + "entity": self.entity_id, + "device": self.coordinator.name, + }, + ) from err + except InvalidAuthError: + await self.coordinator.async_shutdown_device_and_start_reauth() + + async def _press_method(self) -> None: + """Press method.""" + raise NotImplementedError + + +class ShellyButton(ShellyBaseButton): + """Defines a Shelly button.""" + + def __init__( + self, + coordinator: ShellyRpcCoordinator | ShellyBlockCoordinator, + description: ShellyButtonDescription[ + ShellyRpcCoordinator | ShellyBlockCoordinator + ], + ) -> None: + """Initialize Shelly button.""" + super().__init__(coordinator, description) + self._attr_name = f"{coordinator.device.name} {description.name}" self._attr_unique_id = f"{coordinator.mac}_{description.key}" self._attr_device_info = DeviceInfo( connections={(CONNECTION_NETWORK_MAC, coordinator.mac)} ) - async def async_press(self) -> None: - """Triggers the Shelly button press service.""" - await self.entity_description.press_action(self.coordinator) + async def _press_method(self) -> None: + """Press method.""" + method = getattr(self.coordinator.device, self.entity_description.press_action) + + if TYPE_CHECKING: + assert method is not None + + await method() + + +class ShellyBluTrvButton(ShellyBaseButton): + """Represent a Shelly BLU TRV button.""" + + def __init__( + self, + coordinator: ShellyRpcCoordinator, + description: ShellyButtonDescription, + id_: int, + ) -> None: + """Initialize.""" + super().__init__(coordinator, description) + + ble_addr: str = coordinator.device.config[f"{BLU_TRV_IDENTIFIER}:{id_}"]["addr"] + device_name = ( + coordinator.device.config[f"{BLU_TRV_IDENTIFIER}:{id_}"]["name"] + or f"shellyblutrv-{ble_addr.replace(':', '')}" + ) + self._attr_name = f"{device_name} {description.name}" + self._attr_unique_id = f"{ble_addr}_{description.key}" + self._attr_device_info = DeviceInfo( + connections={(CONNECTION_BLUETOOTH, ble_addr)} + ) + self._id = id_ + + async def _press_method(self) -> None: + """Press method.""" + method = getattr(self.coordinator.device, self.entity_description.press_action) + + if TYPE_CHECKING: + assert method is not None + + await 
method(self._id) diff --git a/homeassistant/components/shelly/climate.py b/homeassistant/components/shelly/climate.py index a3ec9be7cb0..498f2d3dba9 100644 --- a/homeassistant/components/shelly/climate.py +++ b/homeassistant/components/shelly/climate.py @@ -7,7 +7,12 @@ from dataclasses import asdict, dataclass from typing import Any, cast from aioshelly.block_device import Block -from aioshelly.const import BLU_TRV_IDENTIFIER, BLU_TRV_MODEL_NAME, RPC_GENERATIONS +from aioshelly.const import ( + BLU_TRV_IDENTIFIER, + BLU_TRV_MODEL_NAME, + BLU_TRV_TIMEOUT, + RPC_GENERATIONS, +) from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError from homeassistant.components.climate import ( @@ -36,7 +41,6 @@ from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM from .const import ( BLU_TRV_TEMPERATURE_SETTINGS, - BLU_TRV_TIMEOUT, DOMAIN, LOGGER, NOT_CALIBRATED_ISSUE_ID, @@ -322,8 +326,12 @@ class BlockSleepingClimate( except DeviceConnectionError as err: self.coordinator.last_update_success = False raise HomeAssistantError( - f"Setting state for entity {self.name} failed, state: {kwargs}, error:" - f" {err!r}" + translation_domain=DOMAIN, + translation_key="device_communication_action_error", + translation_placeholders={ + "entity": self.entity_id, + "device": self.coordinator.name, + }, ) from err except InvalidAuthError: await self.coordinator.async_shutdown_device_and_start_reauth() diff --git a/homeassistant/components/shelly/config_flow.py b/homeassistant/components/shelly/config_flow.py index 8e47235c981..200a88ea24c 100644 --- a/homeassistant/components/shelly/config_flow.py +++ b/homeassistant/components/shelly/config_flow.py @@ -7,12 +7,7 @@ from typing import Any, Final from aioshelly.block_device import BlockDevice from aioshelly.common import ConnectionOptions, get_info -from aioshelly.const import ( - BLOCK_GENERATIONS, - DEFAULT_HTTP_PORT, - MODEL_WALL_DISPLAY, - RPC_GENERATIONS, -) +from aioshelly.const import BLOCK_GENERATIONS, DEFAULT_HTTP_PORT, RPC_GENERATIONS from aioshelly.exceptions import ( CustomPortNotSupported, DeviceConnectionError, @@ -22,12 +17,7 @@ from aioshelly.exceptions import ( from aioshelly.rpc_device import RpcDevice import voluptuous as vol -from homeassistant.config_entries import ( - ConfigEntry, - ConfigFlow, - ConfigFlowResult, - OptionsFlow, -) +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow from homeassistant.const import ( CONF_HOST, CONF_MAC, @@ -49,7 +39,7 @@ from .const import ( LOGGER, BLEScannerMode, ) -from .coordinator import async_reconnect_soon +from .coordinator import ShellyConfigEntry, async_reconnect_soon from .utils import ( get_block_device_sleep_period, get_coap_context, @@ -458,19 +448,17 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlowHandler: + def async_get_options_flow(config_entry: ShellyConfigEntry) -> OptionsFlowHandler: """Get the options flow for this handler.""" return OptionsFlowHandler() @classmethod @callback - def async_supports_options_flow(cls, config_entry: ConfigEntry) -> bool: + def async_supports_options_flow(cls, config_entry: ShellyConfigEntry) -> bool: """Return options flow support for this handler.""" - return ( - get_device_entry_gen(config_entry) in RPC_GENERATIONS - and not config_entry.data.get(CONF_SLEEP_PERIOD) - and config_entry.data.get(CONF_MODEL) != MODEL_WALL_DISPLAY - ) + return get_device_entry_gen( + config_entry + ) in RPC_GENERATIONS and not 
config_entry.data.get(CONF_SLEEP_PERIOD) class OptionsFlowHandler(OptionsFlow): @@ -480,6 +468,13 @@ class OptionsFlowHandler(OptionsFlow): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle options flow.""" + if ( + supports_scripts := self.config_entry.runtime_data.rpc_supports_scripts + ) is None: + return self.async_abort(reason="cannot_connect") + if not supports_scripts: + return self.async_abort(reason="no_scripts_support") + if user_input is not None: return self.async_create_entry(title="", data=user_input) diff --git a/homeassistant/components/shelly/const.py b/homeassistant/components/shelly/const.py index d47f2b0ae80..43fb6df18d0 100644 --- a/homeassistant/components/shelly/const.py +++ b/homeassistant/components/shelly/const.py @@ -25,6 +25,7 @@ from aioshelly.const import ( MODEL_VALVE, MODEL_VINTAGE_V2, MODEL_WALL_DISPLAY, + MODEL_WALL_DISPLAY_X2, ) from homeassistant.components.number import NumberMode @@ -245,6 +246,7 @@ GEN2_RELEASE_URL = "https://shelly-api-docs.shelly.cloud/gen2/changelog/" GEN2_BETA_RELEASE_URL = f"{GEN2_RELEASE_URL}#unreleased" DEVICES_WITHOUT_FIRMWARE_CHANGELOG = ( MODEL_WALL_DISPLAY, + MODEL_WALL_DISPLAY_X2, MODEL_MOTION, MODEL_MOTION_2, MODEL_VALVE, @@ -271,9 +273,6 @@ API_WS_URL = "/api/shelly/ws" COMPONENT_ID_PATTERN = re.compile(r"[a-z\d]+:\d+") -# value confirmed by Shelly team -BLU_TRV_TIMEOUT = 60 - ROLE_TO_DEVICE_CLASS_MAP = { "current_humidity": SensorDeviceClass.HUMIDITY, "current_temperature": SensorDeviceClass.TEMPERATURE, diff --git a/homeassistant/components/shelly/coordinator.py b/homeassistant/components/shelly/coordinator.py index 95812c12e10..4a1ea72f38a 100644 --- a/homeassistant/components/shelly/coordinator.py +++ b/homeassistant/components/shelly/coordinator.py @@ -88,6 +88,8 @@ class ShellyEntryData: rest: ShellyRestCoordinator | None = None rpc: ShellyRpcCoordinator | None = None rpc_poll: ShellyRpcPollingCoordinator | None = None + rpc_script_events: dict[int, list[str]] | None = None + rpc_supports_scripts: bool | None = None type ShellyConfigEntry = ConfigEntry[ShellyEntryData] @@ -377,14 +379,23 @@ class ShellyBlockCoordinator(ShellyCoordinatorBase[BlockDevice]): if self.sleep_period: # Sleeping device, no point polling it, just mark it unavailable raise UpdateFailed( - f"Sleeping device did not update within {self.sleep_period} seconds interval" + translation_domain=DOMAIN, + translation_key="update_error_sleeping_device", + translation_placeholders={ + "device": self.name, + "period": str(self.sleep_period), + }, ) LOGGER.debug("Polling Shelly Block Device - %s", self.name) try: await self.device.update() except DeviceConnectionError as err: - raise UpdateFailed(repr(err)) from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error", + translation_placeholders={"device": self.name}, + ) from err except InvalidAuthError: await self.async_shutdown_device_and_start_reauth() @@ -469,7 +480,11 @@ class ShellyRestCoordinator(ShellyCoordinatorBase[BlockDevice]): return await self.device.update_shelly() except (DeviceConnectionError, MacAddressMismatchError) as err: - raise UpdateFailed(repr(err)) from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error", + translation_placeholders={"device": self.name}, + ) from err except InvalidAuthError: await self.async_shutdown_device_and_start_reauth() else: @@ -635,7 +650,12 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]): if self.sleep_period: # Sleeping device, no point 
polling it, just mark it unavailable raise UpdateFailed( - f"Sleeping device did not update within {self.sleep_period} seconds interval" + translation_domain=DOMAIN, + translation_key="update_error_sleeping_device", + translation_placeholders={ + "device": self.name, + "period": str(self.sleep_period), + }, ) async with self._connection_lock: @@ -643,7 +663,11 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]): return if not await self._async_device_connect_task(): - raise UpdateFailed("Device reconnect error") + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error_reconnect_error", + translation_placeholders={"device": self.name}, + ) async def _async_disconnected(self, reconnect: bool) -> None: """Handle device disconnected.""" @@ -693,7 +717,8 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]): is updated. """ if not self.sleep_period: - await self._async_connect_ble_scanner() + if self.config_entry.runtime_data.rpc_supports_scripts: + await self._async_connect_ble_scanner() else: await self._async_setup_outbound_websocket() @@ -819,13 +844,21 @@ class ShellyRpcPollingCoordinator(ShellyCoordinatorBase[RpcDevice]): async def _async_update_data(self) -> None: """Fetch data.""" if not self.device.connected: - raise UpdateFailed("Device disconnected") + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error_device_disconnected", + translation_placeholders={"device": self.name}, + ) LOGGER.debug("Polling Shelly RPC Device - %s", self.name) try: await self.device.poll() except (DeviceConnectionError, RpcCallError) as err: - raise UpdateFailed(f"Device disconnected: {err!r}") from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error", + translation_placeholders={"device": self.name}, + ) from err except InvalidAuthError: await self.async_shutdown_device_and_start_reauth() diff --git a/homeassistant/components/shelly/device_trigger.py b/homeassistant/components/shelly/device_trigger.py index 6e96eb5ed21..740e6aae9b2 100644 --- a/homeassistant/components/shelly/device_trigger.py +++ b/homeassistant/components/shelly/device_trigger.py @@ -105,7 +105,9 @@ async def async_validate_trigger_config( return config raise InvalidDeviceAutomationConfig( - f"Invalid ({CONF_TYPE},{CONF_SUBTYPE}): {trigger}" + translation_domain=DOMAIN, + translation_key="invalid_trigger", + translation_placeholders={"trigger": str(trigger)}, ) @@ -137,7 +139,11 @@ async def async_get_triggers( return triggers - raise InvalidDeviceAutomationConfig(f"Device not found: {device_id}") + raise InvalidDeviceAutomationConfig( + translation_domain=DOMAIN, + translation_key="device_not_found", + translation_placeholders={"device": device_id}, + ) async def async_attach_trigger( diff --git a/homeassistant/components/shelly/diagnostics.py b/homeassistant/components/shelly/diagnostics.py index cac2bb2f16b..2a9699e0a08 100644 --- a/homeassistant/components/shelly/diagnostics.py +++ b/homeassistant/components/shelly/diagnostics.py @@ -79,12 +79,14 @@ async def async_get_config_entry_diagnostics( device_settings = { k: v for k, v in rpc_coordinator.device.config.items() if k in ["cloud"] } - ws_config = rpc_coordinator.device.config["ws"] - device_settings["ws_outbound_enabled"] = ws_config["enable"] - if ws_config["enable"]: - device_settings["ws_outbound_server_valid"] = bool( - ws_config["server"] == get_rpc_ws_url(hass) - ) + if not (ws_config := rpc_coordinator.device.config.get("ws", {})): + device_settings["ws_outbound"] 
= "not supported" + if (ws_outbound_enabled := ws_config.get("enable")) is not None: + device_settings["ws_outbound_enabled"] = ws_outbound_enabled + if ws_outbound_enabled: + device_settings["ws_outbound_server_valid"] = bool( + ws_config["server"] == get_rpc_ws_url(hass) + ) device_status = { k: v for k, v in rpc_coordinator.device.status.items() diff --git a/homeassistant/components/shelly/entity.py b/homeassistant/components/shelly/entity.py index 001727c74b3..9ed3f47b41a 100644 --- a/homeassistant/components/shelly/entity.py +++ b/homeassistant/components/shelly/entity.py @@ -19,7 +19,7 @@ from homeassistant.helpers.entity_registry import RegistryEntry from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import CONF_SLEEP_PERIOD, LOGGER +from .const import CONF_SLEEP_PERIOD, DOMAIN, LOGGER from .coordinator import ShellyBlockCoordinator, ShellyConfigEntry, ShellyRpcCoordinator from .utils import ( async_remove_shelly_entity, @@ -296,7 +296,6 @@ class RpcEntityDescription(EntityDescription): value: Callable[[Any, Any], Any] | None = None available: Callable[[dict], bool] | None = None removal_condition: Callable[[dict, dict, str], bool] | None = None - extra_state_attributes: Callable[[dict, dict], dict | None] | None = None use_polling_coordinator: bool = False supported: Callable = lambda _: False unit: Callable[[dict], str | None] | None = None @@ -313,7 +312,6 @@ class RestEntityDescription(EntityDescription): name: str = "" value: Callable[[dict, Any], Any] | None = None - extra_state_attributes: Callable[[dict], dict | None] | None = None class ShellyBlockEntity(CoordinatorEntity[ShellyBlockCoordinator]): @@ -347,8 +345,12 @@ class ShellyBlockEntity(CoordinatorEntity[ShellyBlockCoordinator]): except DeviceConnectionError as err: self.coordinator.last_update_success = False raise HomeAssistantError( - f"Setting state for entity {self.name} failed, state: {kwargs}, error:" - f" {err!r}" + translation_domain=DOMAIN, + translation_key="device_communication_action_error", + translation_placeholders={ + "entity": self.entity_id, + "device": self.coordinator.name, + }, ) from err except InvalidAuthError: await self.coordinator.async_shutdown_device_and_start_reauth() @@ -408,13 +410,21 @@ class ShellyRpcEntity(CoordinatorEntity[ShellyRpcCoordinator]): except DeviceConnectionError as err: self.coordinator.last_update_success = False raise HomeAssistantError( - f"Call RPC for {self.name} connection error, method: {method}, params:" - f" {params}, error: {err!r}" + translation_domain=DOMAIN, + translation_key="device_communication_action_error", + translation_placeholders={ + "entity": self.entity_id, + "device": self.coordinator.name, + }, ) from err except RpcCallError as err: raise HomeAssistantError( - f"Call RPC for {self.name} request error, method: {method}, params:" - f" {params}, error: {err!r}" + translation_domain=DOMAIN, + translation_key="rpc_call_action_error", + translation_placeholders={ + "entity": self.entity_id, + "device": self.coordinator.name, + }, ) from err except InvalidAuthError: await self.coordinator.async_shutdown_device_and_start_reauth() diff --git a/homeassistant/components/shelly/event.py b/homeassistant/components/shelly/event.py index bfd705f447a..ec5810581b1 100644 --- a/homeassistant/components/shelly/event.py +++ b/homeassistant/components/shelly/event.py @@ -34,7 +34,6 @@ from .utils import ( get_device_entry_gen, get_rpc_entity_name, get_rpc_key_instances, - 
get_rpc_script_event_types, is_block_momentary_input, is_rpc_momentary_input, ) @@ -109,18 +108,15 @@ async def async_setup_entry( script_instances = get_rpc_key_instances( coordinator.device.status, SCRIPT_EVENT.key ) + script_events = config_entry.runtime_data.rpc_script_events for script in script_instances: script_name = get_rpc_entity_name(coordinator.device, script) if script_name == BLE_SCRIPT_NAME: continue - event_types = await get_rpc_script_event_types( - coordinator.device, int(script.split(":")[-1]) - ) - if not event_types: - continue - - entities.append(ShellyRpcScriptEvent(coordinator, script, event_types)) + script_id = int(script.split(":")[-1]) + if script_events and (event_types := script_events[script_id]): + entities.append(ShellyRpcScriptEvent(coordinator, script, event_types)) # If a script is removed, from the device configuration, we need to remove orphaned entities async_remove_orphaned_entities( diff --git a/homeassistant/components/shelly/icons.json b/homeassistant/components/shelly/icons.json index f93abf6b854..08b269a73c5 100644 --- a/homeassistant/components/shelly/icons.json +++ b/homeassistant/components/shelly/icons.json @@ -23,12 +23,18 @@ "gas_concentration": { "default": "mdi:gauge" }, + "gas_detected": { + "default": "mdi:gas-burner" + }, "lamp_life": { "default": "mdi:progress-wrench" }, "operation": { "default": "mdi:cog-transfer" }, + "self_test": { + "default": "mdi:progress-wrench" + }, "tilt": { "default": "mdi:angle-acute" }, diff --git a/homeassistant/components/shelly/manifest.json b/homeassistant/components/shelly/manifest.json index c8ac5520b13..e863720e476 100644 --- a/homeassistant/components/shelly/manifest.json +++ b/homeassistant/components/shelly/manifest.json @@ -8,7 +8,7 @@ "integration_type": "device", "iot_class": "local_push", "loggers": ["aioshelly"], - "requirements": ["aioshelly==13.2.0"], + "requirements": ["aioshelly==13.4.0"], "zeroconf": [ { "type": "_http._tcp.local.", diff --git a/homeassistant/components/shelly/number.py b/homeassistant/components/shelly/number.py index 59716f39c7f..c629eb4a57a 100644 --- a/homeassistant/components/shelly/number.py +++ b/homeassistant/components/shelly/number.py @@ -7,7 +7,7 @@ from dataclasses import dataclass from typing import TYPE_CHECKING, Any, Final, cast from aioshelly.block_device import Block -from aioshelly.const import RPC_GENERATIONS +from aioshelly.const import BLU_TRV_TIMEOUT, RPC_GENERATIONS from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError from homeassistant.components.number import ( @@ -25,7 +25,7 @@ from homeassistant.helpers.device_registry import CONNECTION_BLUETOOTH, DeviceIn from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.entity_registry import RegistryEntry -from .const import BLU_TRV_TIMEOUT, CONF_SLEEP_PERIOD, LOGGER, VIRTUAL_NUMBER_MODE_MAP +from .const import CONF_SLEEP_PERIOD, DOMAIN, LOGGER, VIRTUAL_NUMBER_MODE_MAP from .coordinator import ShellyBlockCoordinator, ShellyConfigEntry, ShellyRpcCoordinator from .entity import ( BlockEntityDescription, @@ -324,8 +324,12 @@ class BlockSleepingNumber(ShellySleepingBlockAttributeEntity, RestoreNumber): except DeviceConnectionError as err: self.coordinator.last_update_success = False raise HomeAssistantError( - f"Setting state for entity {self.name} failed, state: {params}, error:" - f" {err!r}" + translation_domain=DOMAIN, + translation_key="device_communication_action_error", + translation_placeholders={ + "entity": self.entity_id, 
+ "device": self.coordinator.name, + }, ) from err except InvalidAuthError: await self.coordinator.async_shutdown_device_and_start_reauth() diff --git a/homeassistant/components/shelly/sensor.py b/homeassistant/components/shelly/sensor.py index 183a1aa06a1..79e4c97aead 100644 --- a/homeassistant/components/shelly/sensor.py +++ b/homeassistant/components/shelly/sensor.py @@ -39,7 +39,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.entity_registry import RegistryEntry from homeassistant.helpers.typing import StateType -from .const import CONF_SLEEP_PERIOD, ROLE_TO_DEVICE_CLASS_MAP, SHAIR_MAX_WORK_HOURS +from .const import CONF_SLEEP_PERIOD, ROLE_TO_DEVICE_CLASS_MAP from .coordinator import ShellyBlockCoordinator, ShellyConfigEntry, ShellyRpcCoordinator from .entity import ( BlockEntityDescription, @@ -58,6 +58,7 @@ from .utils import ( async_remove_orphaned_entities, get_device_entry_gen, get_device_uptime, + get_shelly_air_lamp_life, get_virtual_component_ids, is_rpc_wifi_stations_disabled, ) @@ -355,8 +356,9 @@ SENSORS: dict[tuple[str, str], BlockSensorDescription] = { name="Lamp life", native_unit_of_measurement=PERCENTAGE, translation_key="lamp_life", - value=lambda value: 100 - (value / 3600 / SHAIR_MAX_WORK_HOURS), + value=get_shelly_air_lamp_life, suggested_display_precision=1, + # Deprecated, remove in 2025.10 extra_state_attributes=lambda block: { "Operational hours": round(cast(int, block.totalWorkTime) / 3600, 1) }, @@ -374,9 +376,10 @@ SENSORS: dict[tuple[str, str], BlockSensorDescription] = { key="sensor|sensorOp", name="Operation", device_class=SensorDeviceClass.ENUM, - options=["unknown", "warmup", "normal", "fault"], + options=["warmup", "normal", "fault"], translation_key="operation", - value=lambda value: value, + value=lambda value: None if value == "unknown" else value, + # Deprecated, remove in 2025.10 extra_state_attributes=lambda block: {"self_test": block.selfTest}, ), ("valve", "valve"): BlockSensorDescription( @@ -391,11 +394,33 @@ SENSORS: dict[tuple[str, str], BlockSensorDescription] = { "failure", "opened", "opening", - "unknown", ], + value=lambda value: None if value == "unknown" else value, entity_category=EntityCategory.DIAGNOSTIC, removal_condition=lambda _, block: block.valve == "not_connected", ), + ("sensor", "gas"): BlockSensorDescription( + key="sensor|gas", + name="Gas detected", + translation_key="gas_detected", + device_class=SensorDeviceClass.ENUM, + options=[ + "none", + "mild", + "heavy", + "test", + ], + value=lambda value: None if value == "unknown" else value, + entity_category=EntityCategory.DIAGNOSTIC, + ), + ("sensor", "selfTest"): BlockSensorDescription( + key="sensor|selfTest", + name="Self test", + translation_key="self_test", + device_class=SensorDeviceClass.ENUM, + options=["not_completed", "completed", "running", "pending"], + entity_category=EntityCategory.DIAGNOSTIC, + ), } REST_SENSORS: Final = { diff --git a/homeassistant/components/shelly/strings.json b/homeassistant/components/shelly/strings.json index eb869b54e4c..afc3f92a3ce 100644 --- a/homeassistant/components/shelly/strings.json +++ b/homeassistant/components/shelly/strings.json @@ -17,12 +17,20 @@ "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "username": "Username for the device's web panel.", + "password": "Password for the device's web panel." 
} }, "reauth_confirm": { "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "username": "[%key:component::shelly::config::step::credentials::data_description::username%]", + "password": "[%key:component::shelly::config::step::credentials::data_description::password%]" } }, "confirm_discovery": { @@ -87,8 +95,15 @@ "description": "Bluetooth scanning can be active or passive. With active, the Shelly requests data from nearby devices; with passive, the Shelly receives unsolicited data from nearby devices.", "data": { "ble_scanner_mode": "Bluetooth scanner mode" + }, + "data_description": { + "ble_scanner_mode": "The scanner mode to use for Bluetooth scanning." } } + }, + "abort": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "no_scripts_support": "Device does not support scripts and cannot be used as a Bluetooth scanner." } }, "selector": { @@ -106,7 +121,6 @@ "state_attributes": { "detected": { "state": { - "unknown": "Unknown", "none": "None", "mild": "Mild", "heavy": "Heavy", @@ -139,9 +153,26 @@ } }, "sensor": { + "gas_detected": { + "state": { + "none": "None", + "mild": "Mild", + "heavy": "Heavy", + "test": "Test" + }, + "state_attributes": { + "options": { + "state": { + "none": "[%key:component::shelly::entity::sensor::gas_detected::state::none%]", + "mild": "[%key:component::shelly::entity::sensor::gas_detected::state::mild%]", + "heavy": "[%key:component::shelly::entity::sensor::gas_detected::state::heavy%]", + "test": "[%key:component::shelly::entity::sensor::gas_detected::state::test%]" + } + } + } + }, "operation": { "state": { - "unknown": "Unknown", "warmup": "Warm-up", "normal": "Normal", "fault": "Fault" @@ -157,6 +188,24 @@ } } }, + "self_test": { + "state": { + "not_completed": "Not completed", + "completed": "Completed", + "running": "Running", + "pending": "Pending" + }, + "state_attributes": { + "options": { + "state": { + "not_completed": "[%key:component::shelly::entity::sensor::self_test::state::not_completed%]", + "completed": "[%key:component::shelly::entity::sensor::self_test::state::completed%]", + "running": "[%key:component::shelly::entity::sensor::self_test::state::running%]", + "pending": "[%key:component::shelly::entity::sensor::self_test::state::pending%]" + } + } + } + }, "valve_status": { "state": { "checking": "Checking", @@ -164,12 +213,52 @@ "closing": "Closing", "failure": "Failure", "opened": "Opened", - "opening": "Opening", - "unknown": "[%key:component::shelly::entity::sensor::operation::state::unknown%]" + "opening": "Opening" } } } }, + "exceptions": { + "auth_error": { + "message": "Authentication failed for {device}, please update your credentials" + }, + "device_communication_error": { + "message": "Device communication error occurred for {device}" + }, + "device_communication_action_error": { + "message": "Device communication error occurred while calling action for {entity} of {device}" + }, + "device_not_found": { + "message": "{device} not found while configuring device automation triggers" + }, + "firmware_unsupported": { + "message": "{device} is running an unsupported firmware, please update the firmware" + }, + "invalid_trigger": { + "message": "Invalid device automation trigger (type, subtype): {trigger}" + }, + "ota_update_connection_error": { + "message": "Device communication error occurred while triggering OTA update for {device}" + }, + "ota_update_rpc_error": { + "message": "RPC call error 
occurred while triggering OTA update for {device}" + }, + "rpc_call_action_error": { + "message": "RPC call error occurred while calling action for {entity} of {device}" + }, + "update_error": { + "message": "An error occurred while retrieving data from {device}" + }, + "update_error_device_disconnected": { + "message": "An error occurred while retrieving data from {device} because it is disconnected" + }, + "update_error_reconnect_error": { + "message": "An error occurred while reconnecting to {device}" + }, + "update_error_sleeping_device": { + "message": "Sleeping device did not update within {period} seconds interval" + } + }, "issues": { "device_not_calibrated": { "title": "Shelly device {device_name} is not calibrated", diff --git a/homeassistant/components/shelly/update.py b/homeassistant/components/shelly/update.py index b1aa84b2640..12ce6dc70cd 100644 --- a/homeassistant/components/shelly/update.py +++ b/homeassistant/components/shelly/update.py @@ -25,7 +25,14 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.restore_state import RestoreEntity -from .const import CONF_SLEEP_PERIOD, OTA_BEGIN, OTA_ERROR, OTA_PROGRESS, OTA_SUCCESS +from .const import ( + CONF_SLEEP_PERIOD, + DOMAIN, + OTA_BEGIN, + OTA_ERROR, + OTA_PROGRESS, + OTA_SUCCESS, +) from .coordinator import ShellyBlockCoordinator, ShellyConfigEntry, ShellyRpcCoordinator from .entity import ( RestEntityDescription, @@ -198,7 +205,11 @@ class RestUpdateEntity(ShellyRestAttributeEntity, UpdateEntity): try: result = await self.coordinator.device.trigger_ota_update(beta=beta) except DeviceConnectionError as err: - raise HomeAssistantError(f"Error starting OTA update: {err!r}") from err + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="ota_update_connection_error", + translation_placeholders={"device": self.coordinator.name}, + ) from err except InvalidAuthError: await self.coordinator.async_shutdown_device_and_start_reauth() else: @@ -310,9 +321,20 @@ class RpcUpdateEntity(ShellyRpcAttributeEntity, UpdateEntity): try: await self.coordinator.device.trigger_ota_update(beta=beta) except DeviceConnectionError as err: - raise HomeAssistantError(f"OTA update connection error: {err!r}") from err + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="ota_update_connection_error", + translation_placeholders={"device": self.coordinator.name}, + ) from err except RpcCallError as err: - raise HomeAssistantError(f"OTA update request error: {err!r}") from err + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="ota_update_rpc_error", + translation_placeholders={ + "entity": self.entity_id, + "device": self.coordinator.name, + }, + ) from err except InvalidAuthError: await self.coordinator.async_shutdown_device_and_start_reauth() else: diff --git a/homeassistant/components/shelly/utils.py b/homeassistant/components/shelly/utils.py index 626cb287f64..474e2bb9410 100644 --- a/homeassistant/components/shelly/utils.py +++ b/homeassistant/components/shelly/utils.py @@ -59,6 +59,7 @@ from .const import ( GEN2_RELEASE_URL, LOGGER, RPC_INPUTS_EVENTS_TYPES, + SHAIR_MAX_WORK_HOURS, SHBTN_INPUTS_EVENTS_TYPES, SHBTN_MODELS, SHELLY_EMIT_EVENT_PATTERN, @@ -655,3 +656,28 @@ def is_rpc_exclude_from_relay( return True return is_rpc_channel_type_light(settings, ch) + + +def get_shelly_air_lamp_life(lamp_seconds: int) -> float: + """Return Shelly Air lamp life in percentage.""" 
+ lamp_hours = lamp_seconds / 3600 + if lamp_hours >= SHAIR_MAX_WORK_HOURS: + return 0.0 + return 100 * (1 - lamp_hours / SHAIR_MAX_WORK_HOURS) + + +async def get_rpc_scripts_event_types( + device: RpcDevice, ignore_scripts: list[str] +) -> dict[int, list[str]]: + """Return a dict of all scripts and their event types.""" + script_instances = get_rpc_key_instances(device.status, "script") + script_events = {} + for script in script_instances: + script_name = get_rpc_entity_name(device, script) + if script_name in ignore_scripts: + continue + + script_id = int(script.split(":")[-1]) + script_events[script_id] = await get_rpc_script_event_types(device, script_id) + + return script_events diff --git a/homeassistant/components/sky_remote/config_flow.py b/homeassistant/components/sky_remote/config_flow.py index 13cddf99332..51cf9c9bf64 100644 --- a/homeassistant/components/sky_remote/config_flow.py +++ b/homeassistant/components/sky_remote/config_flow.py @@ -12,6 +12,8 @@ from homeassistant.helpers import config_validation as cv from .const import DEFAULT_PORT, DOMAIN, LEGACY_PORT +_LOGGER = logging.getLogger(__name__) + DATA_SCHEMA = vol.Schema( { vol.Required(CONF_HOST): cv.string, @@ -21,7 +23,7 @@ DATA_SCHEMA = vol.Schema( async def async_find_box_port(host: str) -> int: """Find port box uses for communication.""" - logging.debug("Attempting to find port to connect to %s on", host) + _LOGGER.debug("Attempting to find port to connect to %s on", host) remote = RemoteControl(host, DEFAULT_PORT) try: await remote.check_connectable() @@ -46,12 +48,12 @@ class SkyRemoteConfigFlow(ConfigFlow, domain=DOMAIN): errors: dict[str, str] = {} if user_input is not None: - logging.debug("user_input: %s", user_input) + _LOGGER.debug("user_input: %s", user_input) self._async_abort_entries_match(user_input) try: port = await async_find_box_port(user_input[CONF_HOST]) except SkyBoxConnectionError: - logging.exception("while finding port of skybox") + _LOGGER.exception("While finding port of skybox") errors["base"] = "cannot_connect" else: return self.async_create_entry( diff --git a/homeassistant/components/skybell/config_flow.py b/homeassistant/components/skybell/config_flow.py index a32441f4cf8..9893d0dd93a 100644 --- a/homeassistant/components/skybell/config_flow.py +++ b/homeassistant/components/skybell/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Mapping +import logging from typing import Any from aioskybell import Skybell, exceptions @@ -14,6 +15,8 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN +_LOGGER = logging.getLogger(__name__) + class SkybellFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a config flow for Skybell.""" @@ -95,6 +98,7 @@ class SkybellFlowHandler(ConfigFlow, domain=DOMAIN): return None, "invalid_auth" except exceptions.SkybellException: return None, "cannot_connect" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") return None, "unknown" return skybell.user_id, None diff --git a/homeassistant/components/slide_local/strings.json b/homeassistant/components/slide_local/strings.json index 67514ff0d50..10efa4bc4f2 100644 --- a/homeassistant/components/slide_local/strings.json +++ b/homeassistant/components/slide_local/strings.json @@ -25,7 +25,7 @@ }, "zeroconf_confirm": { "title": "Confirm setup for Slide", - "description": "Do you want to setup {host}?" + "description": "Do you want to set up {host}?" 
} }, "abort": { diff --git a/homeassistant/components/smappee/strings.json b/homeassistant/components/smappee/strings.json index 2966b5cd753..3037fbc98f6 100644 --- a/homeassistant/components/smappee/strings.json +++ b/homeassistant/components/smappee/strings.json @@ -15,7 +15,7 @@ } }, "zeroconf_confirm": { - "description": "Do you want to add the Smappee device with serialnumber `{serialnumber}` to Home Assistant?", + "description": "Do you want to add the Smappee device with serial number `{serialnumber}` to Home Assistant?", "title": "Discovered Smappee device" }, "pick_implementation": { diff --git a/homeassistant/components/smart_meter_texas/__init__.py b/homeassistant/components/smart_meter_texas/__init__.py index 1cd7df68e91..ce87b85c322 100644 --- a/homeassistant/components/smart_meter_texas/__init__.py +++ b/homeassistant/components/smart_meter_texas/__init__.py @@ -3,7 +3,7 @@ import logging import ssl -from smart_meter_texas import Account, Client, ClientSSLContext +from smart_meter_texas import Account, Client from smart_meter_texas.exceptions import ( SmartMeterTexasAPIError, SmartMeterTexasAuthError, @@ -16,6 +16,7 @@ from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import aiohttp_client from homeassistant.helpers.debounce import Debouncer from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed +from homeassistant.util.ssl import get_default_context from .const import ( DATA_COORDINATOR, @@ -38,8 +39,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: account = Account(username, password) - client_ssl_context = ClientSSLContext() - ssl_context = await client_ssl_context.get_ssl_context() + ssl_context = get_default_context() smart_meter_texas_data = SmartMeterTexasData(hass, entry, account, ssl_context) try: diff --git a/homeassistant/components/smart_meter_texas/config_flow.py b/homeassistant/components/smart_meter_texas/config_flow.py index b60855b62c8..18a3716e1b9 100644 --- a/homeassistant/components/smart_meter_texas/config_flow.py +++ b/homeassistant/components/smart_meter_texas/config_flow.py @@ -4,7 +4,7 @@ import logging from typing import Any from aiohttp import ClientError -from smart_meter_texas import Account, Client, ClientSSLContext +from smart_meter_texas import Account, Client from smart_meter_texas.exceptions import ( SmartMeterTexasAPIError, SmartMeterTexasAuthError, @@ -16,6 +16,7 @@ from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import aiohttp_client +from homeassistant.util.ssl import get_default_context from .const import DOMAIN @@ -31,8 +32,7 @@ async def validate_input(hass: HomeAssistant, data): Data has the keys from DATA_SCHEMA with values provided by the user. 
""" - client_ssl_context = ClientSSLContext() - ssl_context = await client_ssl_context.get_ssl_context() + ssl_context = get_default_context() client_session = aiohttp_client.async_get_clientsession(hass) account = Account(data["username"], data["password"]) client = Client(client_session, account, ssl_context) diff --git a/homeassistant/components/smartthings/__init__.py b/homeassistant/components/smartthings/__init__.py index 3e0e66e890f..20325e7d3e5 100644 --- a/homeassistant/components/smartthings/__init__.py +++ b/homeassistant/components/smartthings/__init__.py @@ -3,40 +3,61 @@ from __future__ import annotations from collections.abc import Callable +import contextlib from dataclasses import dataclass +from http import HTTPStatus import logging -from typing import TYPE_CHECKING, cast +from typing import TYPE_CHECKING, Any, cast -from aiohttp import ClientError +from aiohttp import ClientResponseError from pysmartthings import ( Attribute, Capability, + ComponentStatus, Device, DeviceEvent, + Lifecycle, Scene, SmartThings, SmartThingsAuthenticationFailedError, + SmartThingsConnectionError, + SmartThingsSinkError, Status, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN, Platform -from homeassistant.core import HomeAssistant +from homeassistant.const import ( + ATTR_CONNECTIONS, + ATTR_HW_VERSION, + ATTR_MANUFACTURER, + ATTR_MODEL, + ATTR_SW_VERSION, + ATTR_VIA_DEVICE, + CONF_ACCESS_TOKEN, + CONF_TOKEN, + EVENT_HOMEASSISTANT_STOP, + Platform, +) +from homeassistant.core import Event, HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers import device_registry as dr +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.config_entry_oauth2_flow import ( OAuth2Session, async_get_config_entry_implementation, ) +from homeassistant.helpers.entity_registry import RegistryEntry, async_migrate_entries from .const import ( + BINARY_SENSOR_ATTRIBUTES_TO_CAPABILITIES, CONF_INSTALLED_APP_ID, CONF_LOCATION_ID, + CONF_SUBSCRIPTION_ID, DOMAIN, EVENT_BUTTON, MAIN, OLD_DATA, + SENSOR_ATTRIBUTES_TO_CAPABILITIES, ) _LOGGER = logging.getLogger(__name__) @@ -57,21 +78,28 @@ class FullDevice: """Define an object to hold device data.""" device: Device - status: dict[str, dict[Capability | str, dict[Attribute | str, Status]]] + status: dict[str, ComponentStatus] type SmartThingsConfigEntry = ConfigEntry[SmartThingsData] PLATFORMS = [ Platform.BINARY_SENSOR, + Platform.BUTTON, Platform.CLIMATE, Platform.COVER, + Platform.EVENT, Platform.FAN, Platform.LIGHT, Platform.LOCK, + Platform.MEDIA_PLAYER, + Platform.NUMBER, Platform.SCENE, + Platform.SELECT, Platform.SENSOR, Platform.SWITCH, + Platform.UPDATE, + Platform.VALVE, ] @@ -86,7 +114,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: SmartThingsConfigEntry) try: await session.async_ensure_token_valid() - except ClientError as err: + except ClientResponseError as err: + if err.status == HTTPStatus.BAD_REQUEST: + raise ConfigEntryAuthFailed("Token not valid, trigger renewal") from err raise ConfigEntryNotReady from err client = SmartThings(session=async_get_clientsession(hass)) @@ -100,6 +130,59 @@ async def async_setup_entry(hass: HomeAssistant, entry: SmartThingsConfigEntry) client.refresh_token_function = _refresh_token + def _handle_max_connections() -> None: + _LOGGER.debug( + "We hit the 
limit of max connections or we could not remove the old one, so retrying" + ) + hass.config_entries.async_schedule_reload(entry.entry_id) + + client.max_connections_reached_callback = _handle_max_connections + + def _handle_new_subscription_identifier(identifier: str | None) -> None: + """Handle a new subscription identifier.""" + hass.config_entries.async_update_entry( + entry, + data={ + **entry.data, + CONF_SUBSCRIPTION_ID: identifier, + }, + ) + if identifier is not None: + _LOGGER.debug("Updating subscription ID to %s", identifier) + else: + _LOGGER.debug("Removing subscription ID") + + client.new_subscription_id_callback = _handle_new_subscription_identifier + + if (old_identifier := entry.data.get(CONF_SUBSCRIPTION_ID)) is not None: + _LOGGER.debug("Trying to delete old subscription %s", old_identifier) + try: + await client.delete_subscription(old_identifier) + except SmartThingsConnectionError as err: + raise ConfigEntryNotReady("Could not delete old subscription") from err + + _LOGGER.debug("Trying to create a new subscription") + try: + subscription = await client.create_subscription( + entry.data[CONF_LOCATION_ID], + entry.data[CONF_TOKEN][CONF_INSTALLED_APP_ID], + ) + except SmartThingsSinkError as err: + _LOGGER.exception("Couldn't create a new subscription") + raise ConfigEntryNotReady from err + subscription_id = subscription.subscription_id + _handle_new_subscription_identifier(subscription_id) + + entry.async_create_background_task( + hass, + client.subscribe( + entry.data[CONF_LOCATION_ID], + entry.data[CONF_TOKEN][CONF_INSTALLED_APP_ID], + subscription, + ), + "smartthings_socket", + ) + device_status: dict[str, FullDevice] = {} try: rooms = { @@ -114,30 +197,29 @@ async def async_setup_entry(hass: HomeAssistant, entry: SmartThingsConfigEntry) raise ConfigEntryAuthFailed from err device_registry = dr.async_get(hass) - for dev in device_status.values(): - for component in dev.device.components: - if component.id == MAIN and Capability.BRIDGE in component.capabilities: - assert dev.device.hub - device_registry.async_get_or_create( - config_entry_id=entry.entry_id, - identifiers={(DOMAIN, dev.device.device_id)}, - connections=( - {(dr.CONNECTION_NETWORK_MAC, dev.device.hub.mac_address)} - if dev.device.hub.mac_address - else set() - ), - name=dev.device.label, - sw_version=dev.device.hub.firmware_version, - model=dev.device.hub.hardware_type, - suggested_area=( - rooms.get(dev.device.room_id) if dev.device.room_id else None - ), - ) + create_devices(device_registry, device_status, entry, rooms) + scenes = { scene.scene_id: scene for scene in await client.get_scenes(location_id=entry.data[CONF_LOCATION_ID]) } + def handle_deleted_device(device_id: str) -> None: + """Handle a deleted device.""" + dev_entry = device_registry.async_get_device( + identifiers={(DOMAIN, device_id)}, + ) + if dev_entry is not None: + device_registry.async_update_device( + dev_entry.id, remove_config_entry_id=entry.entry_id + ) + + entry.async_on_unload( + client.add_device_lifecycle_event_listener( + Lifecycle.DELETE, handle_deleted_device + ) + ) + entry.runtime_data = SmartThingsData( devices={ device_id: device @@ -149,6 +231,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: SmartThingsConfigEntry) rooms=rooms, ) + # Events are deprecated and will be removed in 2025.10 def handle_button_press(event: DeviceEvent) -> None: """Handle a button press.""" if ( @@ -171,12 +254,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: SmartThingsConfigEntry) 
client.add_unspecified_device_event_listener(handle_button_press) ) - entry.async_create_background_task( - hass, - client.subscribe( - entry.data[CONF_LOCATION_ID], entry.data[CONF_TOKEN][CONF_INSTALLED_APP_ID] - ), - "smartthings_webhook", + async def _handle_shutdown(_: Event) -> None: + """Handle shutdown.""" + await client.delete_subscription(subscription_id) + + entry.async_on_unload( + hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _handle_shutdown) ) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) @@ -201,6 +284,10 @@ async def async_unload_entry( hass: HomeAssistant, entry: SmartThingsConfigEntry ) -> bool: """Unload a config entry.""" + client = entry.runtime_data.client + if (subscription_id := entry.data.get(CONF_SUBSCRIPTION_ID)) is not None: + with contextlib.suppress(SmartThingsConnectionError): + await client.delete_subscription(subscription_id) return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) @@ -213,44 +300,177 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: entry, version=3, data={OLD_DATA: dict(entry.data)} ) + if entry.minor_version < 2: + + def migrate_entities(entity_entry: RegistryEntry) -> dict[str, Any] | None: + if entity_entry.domain == "binary_sensor": + device_id, attribute = entity_entry.unique_id.split(".") + if ( + capability := BINARY_SENSOR_ATTRIBUTES_TO_CAPABILITIES.get( + attribute + ) + ) is None: + return None + new_unique_id = ( + f"{device_id}_{MAIN}_{capability}_{attribute}_{attribute}" + ) + return { + "new_unique_id": new_unique_id, + } + if entity_entry.domain in {"cover", "climate", "fan", "light", "lock"}: + return {"new_unique_id": f"{entity_entry.unique_id}_{MAIN}"} + if entity_entry.domain == "sensor": + delimiter = "." 
if " " not in entity_entry.unique_id else " " + if delimiter not in entity_entry.unique_id: + return None + device_id, attribute = entity_entry.unique_id.split( + delimiter, maxsplit=1 + ) + if ( + capability := SENSOR_ATTRIBUTES_TO_CAPABILITIES.get(attribute) + ) is None: + if attribute in { + "energy_meter", + "power_meter", + "deltaEnergy_meter", + "powerEnergy_meter", + "energySaved_meter", + }: + return { + "new_unique_id": f"{device_id}_{MAIN}_{Capability.POWER_CONSUMPTION_REPORT}_{Attribute.POWER_CONSUMPTION}_{attribute}", + } + if attribute in { + "X Coordinate", + "Y Coordinate", + "Z Coordinate", + }: + new_attribute = { + "X Coordinate": "x_coordinate", + "Y Coordinate": "y_coordinate", + "Z Coordinate": "z_coordinate", + }[attribute] + return { + "new_unique_id": f"{device_id}_{MAIN}_{Capability.THREE_AXIS}_{Attribute.THREE_AXIS}_{new_attribute}", + } + if attribute == Attribute.MACHINE_STATE: + capability = determine_machine_type( + hass, entry.entry_id, device_id + ) + if capability is None: + return None + return { + "new_unique_id": f"{device_id}_{MAIN}_{capability}_{attribute}_{attribute}", + } + return None + return { + "new_unique_id": f"{device_id}_{MAIN}_{capability}_{attribute}_{attribute}", + } + + if entity_entry.domain == "switch": + return { + "new_unique_id": f"{entity_entry.unique_id}_{MAIN}_{Capability.SWITCH}_{Attribute.SWITCH}_{Attribute.SWITCH}", + } + + return None + + await async_migrate_entries(hass, entry.entry_id, migrate_entities) + hass.config_entries.async_update_entry( + entry, + minor_version=2, + ) + return True +def determine_machine_type( + hass: HomeAssistant, + entry_id: str, + device_id: str, +) -> Capability | None: + """Determine the machine type for a device.""" + entity_registry = er.async_get(hass) + entries = er.async_entries_for_config_entry(entity_registry, entry_id) + device_entries = [entry for entry in entries if device_id in entry.unique_id] + for entry in device_entries: + if Attribute.DISHWASHER_JOB_STATE in entry.unique_id: + return Capability.DISHWASHER_OPERATING_STATE + if Attribute.WASHER_JOB_STATE in entry.unique_id: + return Capability.WASHER_OPERATING_STATE + if Attribute.DRYER_JOB_STATE in entry.unique_id: + return Capability.DRYER_OPERATING_STATE + if Attribute.OVEN_JOB_STATE in entry.unique_id: + return Capability.OVEN_OPERATING_STATE + return None + + +def create_devices( + device_registry: dr.DeviceRegistry, + devices: dict[str, FullDevice], + entry: SmartThingsConfigEntry, + rooms: dict[str, str], +) -> None: + """Create devices in the device registry.""" + for device in sorted( + devices.values(), key=lambda d: d.device.parent_device_id or "" + ): + kwargs: dict[str, Any] = {} + if device.device.hub is not None: + kwargs = { + ATTR_SW_VERSION: device.device.hub.firmware_version, + ATTR_MODEL: device.device.hub.hardware_type, + } + if device.device.hub.mac_address: + kwargs[ATTR_CONNECTIONS] = { + (dr.CONNECTION_NETWORK_MAC, device.device.hub.mac_address) + } + if device.device.parent_device_id: + kwargs[ATTR_VIA_DEVICE] = (DOMAIN, device.device.parent_device_id) + if (ocf := device.device.ocf) is not None: + kwargs.update( + { + ATTR_MANUFACTURER: ocf.manufacturer_name, + ATTR_MODEL: ( + (ocf.model_number.split("|")[0]) if ocf.model_number else None + ), + ATTR_HW_VERSION: ocf.hardware_version, + ATTR_SW_VERSION: ocf.firmware_version, + } + ) + if (viper := device.device.viper) is not None: + kwargs.update( + { + ATTR_MANUFACTURER: viper.manufacturer_name, + ATTR_MODEL: viper.model_name, + ATTR_HW_VERSION: 
viper.hardware_version, + ATTR_SW_VERSION: viper.software_version, + } + ) + device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + identifiers={(DOMAIN, device.device.device_id)}, + configuration_url="https://account.smartthings.com", + name=device.device.label, + suggested_area=( + rooms.get(device.device.room_id) if device.device.room_id else None + ), + **kwargs, + ) + + KEEP_CAPABILITY_QUIRK: dict[ Capability | str, Callable[[dict[Attribute | str, Status]], bool] ] = { + Capability.DRYER_OPERATING_STATE: ( + lambda status: status[Attribute.SUPPORTED_MACHINE_STATES].value is not None + ), Capability.WASHER_OPERATING_STATE: ( lambda status: status[Attribute.SUPPORTED_MACHINE_STATES].value is not None ), Capability.DEMAND_RESPONSE_LOAD_CONTROL: lambda _: True, } -POWER_CONSUMPTION_FIELDS = { - "energy", - "power", - "deltaEnergy", - "powerEnergy", - "energySaved", -} -CAPABILITY_VALIDATION: dict[ - Capability | str, Callable[[dict[Attribute | str, Status]], bool] -] = { - Capability.POWER_CONSUMPTION_REPORT: ( - lambda status: ( - (power_consumption := status[Attribute.POWER_CONSUMPTION].value) is not None - and all( - field in cast(dict, power_consumption) - for field in POWER_CONSUMPTION_FIELDS - ) - ) - ) -} - - -def process_status( - status: dict[str, dict[Capability | str, dict[Attribute | str, Status]]], -) -> dict[str, dict[Capability | str, dict[Attribute | str, Status]]]: +def process_status(status: dict[str, ComponentStatus]) -> dict[str, ComponentStatus]: """Remove disabled capabilities from status.""" if (main_component := status.get(MAIN)) is None: return status @@ -270,8 +490,4 @@ def process_status( or not KEEP_CAPABILITY_QUIRK[capability](main_component[capability]) ): del main_component[capability] - for capability in list(main_component): - if capability in CAPABILITY_VALIDATION: - if not CAPABILITY_VALIDATION[capability](main_component[capability]): - del main_component[capability] return status diff --git a/homeassistant/components/smartthings/binary_sensor.py b/homeassistant/components/smartthings/binary_sensor.py index 080a90440be..bd09f1725d3 100644 --- a/homeassistant/components/smartthings/binary_sensor.py +++ b/homeassistant/components/smartthings/binary_sensor.py @@ -2,21 +2,30 @@ from __future__ import annotations +from collections.abc import Callable from dataclasses import dataclass -from pysmartthings import Attribute, Capability, SmartThings +from pysmartthings import Attribute, Capability, Category, SmartThings, Status +from homeassistant.components.automation import automations_with_entity from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, BinarySensorEntityDescription, ) +from homeassistant.components.script import scripts_with_entity from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback +from homeassistant.helpers.issue_registry import ( + IssueSeverity, + async_create_issue, + async_delete_issue, +) from . 
import FullDevice, SmartThingsConfigEntry -from .const import MAIN +from .const import DOMAIN, MAIN from .entity import SmartThingsEntity @@ -25,6 +34,13 @@ class SmartThingsBinarySensorEntityDescription(BinarySensorEntityDescription): """Describe a SmartThings binary sensor entity.""" is_on_key: str + category_device_class: dict[Category | str, BinarySensorDeviceClass] | None = None + category: set[Category] | None = None + exists_fn: Callable[[str], bool] | None = None + component_translation_key: dict[str, str] | None = None + deprecated_fn: Callable[ + [dict[str, dict[Capability | str, dict[Attribute | str, Status]]]], str | None + ] = lambda _: None CAPABILITY_TO_SENSORS: dict[ @@ -43,6 +59,29 @@ CAPABILITY_TO_SENSORS: dict[ key=Attribute.CONTACT, device_class=BinarySensorDeviceClass.DOOR, is_on_key="open", + category_device_class={ + Category.GARAGE_DOOR: BinarySensorDeviceClass.GARAGE_DOOR, + Category.DOOR: BinarySensorDeviceClass.DOOR, + Category.WINDOW: BinarySensorDeviceClass.WINDOW, + }, + exists_fn=lambda key: key in {"freezer", "cooler"}, + component_translation_key={ + "freezer": "freezer_door", + "cooler": "cooler_door", + }, + deprecated_fn=( + lambda status: "fridge_door" + if "freezer" in status and "cooler" in status + else None + ), + ) + }, + Capability.CUSTOM_DRYER_WRINKLE_PREVENT: { + Attribute.OPERATING_STATE: SmartThingsBinarySensorEntityDescription( + key=Attribute.OPERATING_STATE, + translation_key="dryer_wrinkle_prevent_active", + is_on_key="running", + entity_category=EntityCategory.DIAGNOSTIC, ) }, Capability.FILTER_STATUS: { @@ -53,6 +92,13 @@ CAPABILITY_TO_SENSORS: dict[ is_on_key="replace", ) }, + Capability.SAMSUNG_CE_KIDS_LOCK: { + Attribute.LOCK_STATE: SmartThingsBinarySensorEntityDescription( + key=Attribute.LOCK_STATE, + translation_key="child_lock", + is_on_key="locked", + ) + }, Capability.MOTION_SENSOR: { Attribute.MOTION: SmartThingsBinarySensorEntityDescription( key=Attribute.MOTION, @@ -67,6 +113,13 @@ CAPABILITY_TO_SENSORS: dict[ is_on_key="present", ) }, + Capability.REMOTE_CONTROL_STATUS: { + Attribute.REMOTE_CONTROL_ENABLED: SmartThingsBinarySensorEntityDescription( + key=Attribute.REMOTE_CONTROL_ENABLED, + translation_key="remote_control", + is_on_key="true", + ) + }, Capability.SOUND_SENSOR: { Attribute.SOUND: SmartThingsBinarySensorEntityDescription( key=Attribute.SOUND, @@ -74,6 +127,21 @@ CAPABILITY_TO_SENSORS: dict[ is_on_key="detected", ) }, + Capability.SWITCH: { + Attribute.SWITCH: SmartThingsBinarySensorEntityDescription( + key=Attribute.SWITCH, + device_class=BinarySensorDeviceClass.POWER, + is_on_key="on", + category={ + Category.CLOTHING_CARE_MACHINE, + Category.COOKTOP, + Category.DISHWASHER, + Category.DRYER, + Category.MICROWAVE, + Category.WASHER, + }, + ) + }, Capability.TAMPER_ALERT: { Attribute.TAMPER: SmartThingsBinarySensorEntityDescription( key=Attribute.TAMPER, @@ -88,6 +156,7 @@ CAPABILITY_TO_SENSORS: dict[ translation_key="valve", device_class=BinarySensorDeviceClass.OPENING, is_on_key="open", + deprecated_fn=lambda _: "valve", ) }, Capability.WATER_SENSOR: { @@ -97,9 +166,25 @@ CAPABILITY_TO_SENSORS: dict[ is_on_key="wet", ) }, + Capability.SAMSUNG_CE_DOOR_STATE: { + Attribute.DOOR_STATE: SmartThingsBinarySensorEntityDescription( + key=Attribute.DOOR_STATE, + translation_key="door", + device_class=BinarySensorDeviceClass.OPENING, + is_on_key="open", + ) + }, } +def get_main_component_category( + device: FullDevice, +) -> Category | str: + """Get the main component of a device.""" + main = 
device.device.components[MAIN] + return main.user_category or main.manufacturer_category + + async def async_setup_entry( hass: HomeAssistant, entry: SmartThingsConfigEntry, @@ -109,17 +194,21 @@ async def async_setup_entry( entry_data = entry.runtime_data async_add_entities( SmartThingsBinarySensor( - entry_data.client, - device, - description, - entry_data.rooms, - capability, - attribute, + entry_data.client, device, description, capability, attribute, component ) for device in entry_data.devices.values() for capability, attribute_map in CAPABILITY_TO_SENSORS.items() - if capability in device.status[MAIN] for attribute, description in attribute_map.items() + for component in device.status + if capability in device.status[component] + and ( + component == MAIN + or (description.exists_fn is not None and description.exists_fn(component)) + ) + and ( + not description.category + or get_main_component_category(device) in description.category + ) ) @@ -133,16 +222,33 @@ class SmartThingsBinarySensor(SmartThingsEntity, BinarySensorEntity): client: SmartThings, device: FullDevice, entity_description: SmartThingsBinarySensorEntityDescription, - rooms: dict[str, str], capability: Capability, attribute: Attribute, + component: str, ) -> None: """Init the class.""" - super().__init__(client, device, rooms, {capability}) + super().__init__(client, device, {capability}, component=component) self._attribute = attribute self.capability = capability self.entity_description = entity_description - self._attr_unique_id = f"{device.device.device_id}.{attribute}" + self._attr_unique_id = f"{device.device.device_id}_{component}_{capability}_{attribute}_{attribute}" + if ( + entity_description.category_device_class + and (category := get_main_component_category(device)) + in entity_description.category_device_class + ): + self._attr_device_class = entity_description.category_device_class[category] + self._attr_name = None + if ( + entity_description.component_translation_key is not None + and ( + translation_key := entity_description.component_translation_key.get( + component + ) + ) + is not None + ): + self._attr_translation_key = translation_key @property def is_on(self) -> bool: @@ -151,3 +257,57 @@ class SmartThingsBinarySensor(SmartThingsEntity, BinarySensorEntity): self.get_attribute_value(self.capability, self._attribute) == self.entity_description.is_on_key ) + + async def async_added_to_hass(self) -> None: + """Call when entity is added to hass.""" + await super().async_added_to_hass() + if (issue := self.entity_description.deprecated_fn(self.device.status)) is None: + return + automations = automations_with_entity(self.hass, self.entity_id) + scripts = scripts_with_entity(self.hass, self.entity_id) + items = automations + scripts + if not items: + return + + entity_reg: er.EntityRegistry = er.async_get(self.hass) + entity_automations = [ + automation_entity + for automation_id in automations + if (automation_entity := entity_reg.async_get(automation_id)) + ] + entity_scripts = [ + script_entity + for script_id in scripts + if (script_entity := entity_reg.async_get(script_id)) + ] + + items_list = [ + f"- [{item.original_name}](/config/automation/edit/{item.unique_id})" + for item in entity_automations + ] + [ + f"- [{item.original_name}](/config/script/edit/{item.unique_id})" + for item in entity_scripts + ] + + async_create_issue( + self.hass, + DOMAIN, + f"deprecated_binary_{issue}_{self.entity_id}", + breaks_in_ha_version="2025.10.0", + is_fixable=False, + severity=IssueSeverity.WARNING, + 
translation_key=f"deprecated_binary_{issue}", + translation_placeholders={ + "entity": self.entity_id, + "items": "\n".join(items_list), + }, + ) + + async def async_will_remove_from_hass(self) -> None: + """Call when entity will be removed from hass.""" + await super().async_will_remove_from_hass() + if (issue := self.entity_description.deprecated_fn(self.device.status)) is None: + return + async_delete_issue( + self.hass, DOMAIN, f"deprecated_binary_{issue}_{self.entity_id}" + ) diff --git a/homeassistant/components/smartthings/button.py b/homeassistant/components/smartthings/button.py new file mode 100644 index 00000000000..00fbaa0e2c4 --- /dev/null +++ b/homeassistant/components/smartthings/button.py @@ -0,0 +1,78 @@ +"""Support for button entities through the SmartThings cloud API.""" + +from __future__ import annotations + +from dataclasses import dataclass + +from pysmartthings import Capability, Command, SmartThings + +from homeassistant.components.button import ButtonEntity, ButtonEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . import FullDevice, SmartThingsConfigEntry +from .const import MAIN +from .entity import SmartThingsEntity + + +@dataclass(frozen=True, kw_only=True) +class SmartThingsButtonDescription(ButtonEntityDescription): + """Class describing SmartThings button entities.""" + + key: Capability + command: Command + + +CAPABILITIES_TO_BUTTONS: dict[Capability | str, SmartThingsButtonDescription] = { + Capability.OVEN_OPERATING_STATE: SmartThingsButtonDescription( + key=Capability.OVEN_OPERATING_STATE, + translation_key="stop", + command=Command.STOP, + ), + Capability.CUSTOM_WATER_FILTER: SmartThingsButtonDescription( + key=Capability.CUSTOM_WATER_FILTER, + translation_key="reset_water_filter", + command=Command.RESET_WATER_FILTER, + ), +} + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SmartThingsConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Add button entities for a config entry.""" + entry_data = entry.runtime_data + async_add_entities( + SmartThingsButtonEntity( + entry_data.client, device, CAPABILITIES_TO_BUTTONS[capability] + ) + for device in entry_data.devices.values() + for capability in device.status[MAIN] + if capability in CAPABILITIES_TO_BUTTONS + ) + + +class SmartThingsButtonEntity(SmartThingsEntity, ButtonEntity): + """Define a SmartThings button.""" + + entity_description: SmartThingsButtonDescription + + def __init__( + self, + client: SmartThings, + device: FullDevice, + entity_description: SmartThingsButtonDescription, + ) -> None: + """Initialize the instance.""" + super().__init__(client, device, set()) + self.entity_description = entity_description + self._attr_unique_id = f"{device.device.device_id}_{MAIN}_{entity_description.key}_{entity_description.command}" + + async def async_press(self) -> None: + """Press the button.""" + await self.execute_device_command( + self.entity_description.key, + self.entity_description.command, + ) diff --git a/homeassistant/components/smartthings/climate.py b/homeassistant/components/smartthings/climate.py index 650b0c5540a..e20f191352f 100644 --- a/homeassistant/components/smartthings/climate.py +++ b/homeassistant/components/smartthings/climate.py @@ -118,12 +118,12 @@ async def async_setup_entry( """Add climate entities for a config entry.""" entry_data = entry.runtime_data entities: list[ClimateEntity] = [ - 
SmartThingsAirConditioner(entry_data.client, entry_data.rooms, device) + SmartThingsAirConditioner(entry_data.client, device) for device in entry_data.devices.values() if all(capability in device.status[MAIN] for capability in AC_CAPABILITIES) ] entities.extend( - SmartThingsThermostat(entry_data.client, entry_data.rooms, device) + SmartThingsThermostat(entry_data.client, device) for device in entry_data.devices.values() if all( capability in device.status[MAIN] for capability in THERMOSTAT_CAPABILITIES @@ -137,14 +137,11 @@ class SmartThingsThermostat(SmartThingsEntity, ClimateEntity): _attr_name = None - def __init__( - self, client: SmartThings, rooms: dict[str, str], device: FullDevice - ) -> None: + def __init__(self, client: SmartThings, device: FullDevice) -> None: """Init the class.""" super().__init__( client, device, - rooms, { Capability.THERMOSTAT_FAN_MODE, Capability.THERMOSTAT_MODE, @@ -321,10 +318,14 @@ class SmartThingsThermostat(SmartThingsEntity, ClimateEntity): @property def temperature_unit(self) -> str: """Return the unit of measurement.""" - unit = self._internal_state[Capability.TEMPERATURE_MEASUREMENT][ - Attribute.TEMPERATURE - ].unit - assert unit + # Offline third party thermostats may not have a unit + # Since climate always requires a unit, default to Celsius + if ( + unit := self._internal_state[Capability.TEMPERATURE_MEASUREMENT][ + Attribute.TEMPERATURE + ].unit + ) is None: + return UnitOfTemperature.CELSIUS return UNIT_MAP[unit] @@ -334,14 +335,11 @@ class SmartThingsAirConditioner(SmartThingsEntity, ClimateEntity): _attr_name = None _attr_preset_mode = None - def __init__( - self, client: SmartThings, rooms: dict[str, str], device: FullDevice - ) -> None: + def __init__(self, client: SmartThings, device: FullDevice) -> None: """Init the class.""" super().__init__( client, device, - rooms, { Capability.AIR_CONDITIONER_MODE, Capability.SWITCH, @@ -567,12 +565,15 @@ class SmartThingsAirConditioner(SmartThingsEntity, ClimateEntity): def _determine_hvac_modes(self) -> list[HVACMode]: """Determine the supported HVAC modes.""" modes = [HVACMode.OFF] - modes.extend( - state - for mode in self.get_attribute_value( + if ( + ac_modes := self.get_attribute_value( Capability.AIR_CONDITIONER_MODE, Attribute.SUPPORTED_AC_MODES ) - if (state := AC_MODE_TO_STATE.get(mode)) is not None - if state not in modes - ) + ) is not None: + modes.extend( + state + for mode in ac_modes + if (state := AC_MODE_TO_STATE.get(mode)) is not None + if state not in modes + ) return modes diff --git a/homeassistant/components/smartthings/config_flow.py b/homeassistant/components/smartthings/config_flow.py index d2654348527..03c8e4bfa66 100644 --- a/homeassistant/components/smartthings/config_flow.py +++ b/homeassistant/components/smartthings/config_flow.py @@ -20,6 +20,7 @@ class SmartThingsConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN): """Handle configuration of SmartThings integrations.""" VERSION = 3 + MINOR_VERSION = 2 DOMAIN = DOMAIN @property diff --git a/homeassistant/components/smartthings/const.py b/homeassistant/components/smartthings/const.py index a6d028aed06..a3ec9a38200 100644 --- a/homeassistant/components/smartthings/const.py +++ b/homeassistant/components/smartthings/const.py @@ -1,5 +1,7 @@ """Constants used by the SmartThings component and platforms.""" +from pysmartthings import Attribute, Capability + DOMAIN = "smartthings" SCOPES = [ @@ -33,4 +35,77 @@ CONF_REFRESH_TOKEN = "refresh_token" MAIN = "main" OLD_DATA = "old_data" +CONF_SUBSCRIPTION_ID = 
"subscription_id" EVENT_BUTTON = "smartthings.button" + +BINARY_SENSOR_ATTRIBUTES_TO_CAPABILITIES: dict[str, str] = { + Attribute.ACCELERATION: Capability.ACCELERATION_SENSOR, + Attribute.CONTACT: Capability.CONTACT_SENSOR, + Attribute.FILTER_STATUS: Capability.FILTER_STATUS, + Attribute.MOTION: Capability.MOTION_SENSOR, + Attribute.PRESENCE: Capability.PRESENCE_SENSOR, + Attribute.SOUND: Capability.SOUND_SENSOR, + Attribute.TAMPER: Capability.TAMPER_ALERT, + Attribute.VALVE: Capability.VALVE, + Attribute.WATER: Capability.WATER_SENSOR, +} + +SENSOR_ATTRIBUTES_TO_CAPABILITIES: dict[str, str] = { + Attribute.LIGHTING_MODE: Capability.ACTIVITY_LIGHTING_MODE, + Attribute.AIR_CONDITIONER_MODE: Capability.AIR_CONDITIONER_MODE, + Attribute.AIR_QUALITY: Capability.AIR_QUALITY_SENSOR, + Attribute.ALARM: Capability.ALARM, + Attribute.BATTERY: Capability.BATTERY, + Attribute.BMI_MEASUREMENT: Capability.BODY_MASS_INDEX_MEASUREMENT, + Attribute.BODY_WEIGHT_MEASUREMENT: Capability.BODY_WEIGHT_MEASUREMENT, + Attribute.CARBON_DIOXIDE: Capability.CARBON_DIOXIDE_MEASUREMENT, + Attribute.CARBON_MONOXIDE: Capability.CARBON_MONOXIDE_MEASUREMENT, + Attribute.CARBON_MONOXIDE_LEVEL: Capability.CARBON_MONOXIDE_MEASUREMENT, + Attribute.DISHWASHER_JOB_STATE: Capability.DISHWASHER_OPERATING_STATE, + Attribute.DRYER_MODE: Capability.DRYER_MODE, + Attribute.DRYER_JOB_STATE: Capability.DRYER_OPERATING_STATE, + Attribute.DUST_LEVEL: Capability.DUST_SENSOR, + Attribute.FINE_DUST_LEVEL: Capability.DUST_SENSOR, + Attribute.ENERGY: Capability.ENERGY_METER, + Attribute.EQUIVALENT_CARBON_DIOXIDE_MEASUREMENT: Capability.EQUIVALENT_CARBON_DIOXIDE_MEASUREMENT, + Attribute.FORMALDEHYDE_LEVEL: Capability.FORMALDEHYDE_MEASUREMENT, + Attribute.GAS_METER: Capability.GAS_METER, + Attribute.GAS_METER_CALORIFIC: Capability.GAS_METER, + Attribute.GAS_METER_TIME: Capability.GAS_METER, + Attribute.GAS_METER_VOLUME: Capability.GAS_METER, + Attribute.ILLUMINANCE: Capability.ILLUMINANCE_MEASUREMENT, + Attribute.INFRARED_LEVEL: Capability.INFRARED_LEVEL, + Attribute.INPUT_SOURCE: Capability.MEDIA_INPUT_SOURCE, + Attribute.PLAYBACK_REPEAT_MODE: Capability.MEDIA_PLAYBACK_REPEAT, + Attribute.PLAYBACK_SHUFFLE: Capability.MEDIA_PLAYBACK_SHUFFLE, + Attribute.PLAYBACK_STATUS: Capability.MEDIA_PLAYBACK, + Attribute.ODOR_LEVEL: Capability.ODOR_SENSOR, + Attribute.OVEN_MODE: Capability.OVEN_MODE, + Attribute.OVEN_JOB_STATE: Capability.OVEN_OPERATING_STATE, + Attribute.OVEN_SETPOINT: Capability.OVEN_SETPOINT, + Attribute.POWER: Capability.POWER_METER, + Attribute.POWER_SOURCE: Capability.POWER_SOURCE, + Attribute.REFRIGERATION_SETPOINT: Capability.REFRIGERATION_SETPOINT, + Attribute.HUMIDITY: Capability.RELATIVE_HUMIDITY_MEASUREMENT, + Attribute.ROBOT_CLEANER_CLEANING_MODE: Capability.ROBOT_CLEANER_CLEANING_MODE, + Attribute.ROBOT_CLEANER_MOVEMENT: Capability.ROBOT_CLEANER_MOVEMENT, + Attribute.ROBOT_CLEANER_TURBO_MODE: Capability.ROBOT_CLEANER_TURBO_MODE, + Attribute.LQI: Capability.SIGNAL_STRENGTH, + Attribute.RSSI: Capability.SIGNAL_STRENGTH, + Attribute.SMOKE: Capability.SMOKE_DETECTOR, + Attribute.TEMPERATURE: Capability.TEMPERATURE_MEASUREMENT, + Attribute.COOLING_SETPOINT: Capability.THERMOSTAT_COOLING_SETPOINT, + Attribute.THERMOSTAT_FAN_MODE: Capability.THERMOSTAT_FAN_MODE, + Attribute.HEATING_SETPOINT: Capability.THERMOSTAT_HEATING_SETPOINT, + Attribute.THERMOSTAT_MODE: Capability.THERMOSTAT_MODE, + Attribute.THERMOSTAT_OPERATING_STATE: Capability.THERMOSTAT_OPERATING_STATE, + Attribute.THERMOSTAT_SETPOINT: Capability.THERMOSTAT_SETPOINT, + 
Attribute.TV_CHANNEL: Capability.TV_CHANNEL, + Attribute.TV_CHANNEL_NAME: Capability.TV_CHANNEL, + Attribute.TVOC_LEVEL: Capability.TVOC_MEASUREMENT, + Attribute.ULTRAVIOLET_INDEX: Capability.ULTRAVIOLET_INDEX, + Attribute.VERY_FINE_DUST_LEVEL: Capability.VERY_FINE_DUST_SENSOR, + Attribute.VOLTAGE: Capability.VOLTAGE_MEASUREMENT, + Attribute.WASHER_MODE: Capability.WASHER_MODE, + Attribute.WASHER_JOB_STATE: Capability.WASHER_OPERATING_STATE, +} diff --git a/homeassistant/components/smartthings/cover.py b/homeassistant/components/smartthings/cover.py index 564de8443b1..0b68409443d 100644 --- a/homeassistant/components/smartthings/cover.py +++ b/homeassistant/components/smartthings/cover.py @@ -41,9 +41,7 @@ async def async_setup_entry( """Add covers for a config entry.""" entry_data = entry.runtime_data async_add_entities( - SmartThingsCover( - entry_data.client, device, entry_data.rooms, Capability(capability) - ) + SmartThingsCover(entry_data.client, device, Capability(capability)) for device in entry_data.devices.values() for capability in device.status[MAIN] if capability in CAPABILITIES @@ -60,14 +58,12 @@ class SmartThingsCover(SmartThingsEntity, CoverEntity): self, client: SmartThings, device: FullDevice, - rooms: dict[str, str], capability: Capability, ) -> None: """Initialize the cover class.""" super().__init__( client, device, - rooms, { capability, Capability.BATTERY, @@ -125,7 +121,12 @@ class SmartThingsCover(SmartThingsEntity, CoverEntity): self._attr_current_cover_position = self.get_attribute_value( Capability.SWITCH_LEVEL, Attribute.LEVEL ) + elif self.supports_capability(Capability.WINDOW_SHADE_LEVEL): + self._attr_current_cover_position = self.get_attribute_value( + Capability.WINDOW_SHADE_LEVEL, Attribute.SHADE_LEVEL + ) + # Deprecated, remove in 2025.10 self._attr_extra_state_attributes = {} if self.supports_capability(Capability.BATTERY): self._attr_extra_state_attributes[ATTR_BATTERY_LEVEL] = ( diff --git a/homeassistant/components/smartthings/diagnostics.py b/homeassistant/components/smartthings/diagnostics.py index dbc5d4e8224..04517112802 100644 --- a/homeassistant/components/smartthings/diagnostics.py +++ b/homeassistant/components/smartthings/diagnostics.py @@ -23,7 +23,7 @@ async def async_get_config_entry_diagnostics( ) -> dict[str, Any]: """Return diagnostics for a config entry.""" client = entry.runtime_data.client - return await client.get_raw_devices() + return {"devices": await client.get_raw_devices()} async def async_get_device_diagnostics( diff --git a/homeassistant/components/smartthings/entity.py b/homeassistant/components/smartthings/entity.py index c2637174a5c..5544297a4c6 100644 --- a/homeassistant/components/smartthings/entity.py +++ b/homeassistant/components/smartthings/entity.py @@ -8,9 +8,9 @@ from pysmartthings import ( Attribute, Capability, Command, + ComponentStatus, DeviceEvent, SmartThings, - Status, ) from homeassistant.helpers.device_registry import DeviceInfo @@ -30,52 +30,24 @@ class SmartThingsEntity(Entity): self, client: SmartThings, device: FullDevice, - rooms: dict[str, str], capabilities: set[Capability], + *, + component: str = MAIN, ) -> None: """Initialize the instance.""" self.client = client self.capabilities = capabilities - self._internal_state: dict[Capability | str, dict[Attribute | str, Status]] = { - capability: device.status[MAIN][capability] + self.component = component + self._internal_state: ComponentStatus = { + capability: device.status[component][capability] for capability in capabilities - if capability in 
device.status[MAIN] + if capability in device.status[component] } self.device = device - self._attr_unique_id = device.device.device_id + self._attr_unique_id = f"{device.device.device_id}_{component}" self._attr_device_info = DeviceInfo( - configuration_url="https://account.smartthings.com", identifiers={(DOMAIN, device.device.device_id)}, - name=device.device.label, - suggested_area=( - rooms.get(device.device.room_id) if device.device.room_id else None - ), ) - if device.device.parent_device_id: - self._attr_device_info["via_device"] = ( - DOMAIN, - device.device.parent_device_id, - ) - if (ocf := device.device.ocf) is not None: - self._attr_device_info.update( - { - "manufacturer": ocf.manufacturer_name, - "model": ( - (ocf.model_number.split("|")[0]) if ocf.model_number else None - ), - "hw_version": ocf.hardware_version, - "sw_version": ocf.firmware_version, - } - ) - if (viper := device.device.viper) is not None: - self._attr_device_info.update( - { - "manufacturer": viper.manufacturer_name, - "model": viper.model_name, - "hw_version": viper.hardware_version, - "sw_version": viper.software_version, - } - ) async def async_added_to_hass(self) -> None: """Subscribe to updates.""" @@ -84,7 +56,7 @@ class SmartThingsEntity(Entity): self.async_on_remove( self.client.add_device_capability_event_listener( self.device.device.device_id, - MAIN, + self.component, capability, self._update_handler, ) @@ -98,7 +70,7 @@ class SmartThingsEntity(Entity): def supports_capability(self, capability: Capability) -> bool: """Test if device supports a capability.""" - return capability in self.device.status[MAIN] + return capability in self.device.status[self.component] def get_attribute_value(self, capability: Capability, attribute: Attribute) -> Any: """Get the value of a device attribute.""" @@ -123,5 +95,5 @@ class SmartThingsEntity(Entity): if argument is not None: kwargs["argument"] = argument await self.client.execute_device_command( - self.device.device.device_id, capability, command, MAIN, **kwargs + self.device.device.device_id, capability, command, self.component, **kwargs ) diff --git a/homeassistant/components/smartthings/event.py b/homeassistant/components/smartthings/event.py new file mode 100644 index 00000000000..8b413f04713 --- /dev/null +++ b/homeassistant/components/smartthings/event.py @@ -0,0 +1,62 @@ +"""Support for events through the SmartThings cloud API.""" + +from __future__ import annotations + +from typing import cast + +from pysmartthings import Attribute, Capability, Component, DeviceEvent, SmartThings + +from homeassistant.components.event import EventDeviceClass, EventEntity +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . 
import FullDevice, SmartThingsConfigEntry +from .entity import SmartThingsEntity + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SmartThingsConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Add events for a config entry.""" + entry_data = entry.runtime_data + async_add_entities( + SmartThingsButtonEvent( + entry_data.client, device, device.device.components[component] + ) + for device in entry_data.devices.values() + for component, capabilities in device.status.items() + if Capability.BUTTON in capabilities + ) + + +class SmartThingsButtonEvent(SmartThingsEntity, EventEntity): + """Define a SmartThings event.""" + + _attr_device_class = EventDeviceClass.BUTTON + _attr_translation_key = "button" + + def __init__( + self, + client: SmartThings, + device: FullDevice, + component: Component, + ) -> None: + """Init the class.""" + super().__init__(client, device, {Capability.BUTTON}, component=component.id) + self._attr_name = component.label + self._attr_unique_id = ( + f"{device.device.device_id}_{component.id}_{Capability.BUTTON}" + ) + + @property + def event_types(self) -> list[str]: + """Return the event types.""" + return self.get_attribute_value( + Capability.BUTTON, Attribute.SUPPORTED_BUTTON_VALUES + ) + + def _update_handler(self, event: DeviceEvent) -> None: + self._trigger_event(cast(str, event.value)) + self.async_write_ha_state() diff --git a/homeassistant/components/smartthings/fan.py b/homeassistant/components/smartthings/fan.py index ef3d9702ce2..1c4cb4edc4a 100644 --- a/homeassistant/components/smartthings/fan.py +++ b/homeassistant/components/smartthings/fan.py @@ -31,7 +31,7 @@ async def async_setup_entry( """Add fans for a config entry.""" entry_data = entry.runtime_data async_add_entities( - SmartThingsFan(entry_data.client, entry_data.rooms, device) + SmartThingsFan(entry_data.client, device) for device in entry_data.devices.values() if Capability.SWITCH in device.status[MAIN] and any( @@ -51,14 +51,11 @@ class SmartThingsFan(SmartThingsEntity, FanEntity): _attr_name = None _attr_speed_count = int_states_in_range(SPEED_RANGE) - def __init__( - self, client: SmartThings, rooms: dict[str, str], device: FullDevice - ) -> None: + def __init__(self, client: SmartThings, device: FullDevice) -> None: """Init the class.""" super().__init__( client, device, - rooms, { Capability.SWITCH, Capability.FAN_SPEED, diff --git a/homeassistant/components/smartthings/icons.json b/homeassistant/components/smartthings/icons.json new file mode 100644 index 00000000000..214a9953a5a --- /dev/null +++ b/homeassistant/components/smartthings/icons.json @@ -0,0 +1,63 @@ +{ + "entity": { + "binary_sensor": { + "dryer_wrinkle_prevent_active": { + "default": "mdi:tumble-dryer", + "state": { + "on": "mdi:tumble-dryer-alert" + } + }, + "remote_control": { + "default": "mdi:remote-off", + "state": { + "on": "mdi:remote" + } + }, + "child_lock": { + "default": "mdi:lock-open", + "state": { + "on": "mdi:lock" + } + } + }, + "button": { + "reset_water_filter": { + "default": "mdi:reload" + }, + "stop": { + "default": "mdi:stop" + } + }, + "number": { + "washer_rinse_cycles": { + "default": "mdi:waves-arrow-up" + } + }, + "select": { + "operating_state": { + "state": { + "run": "mdi:play", + "pause": "mdi:pause", + "stop": "mdi:stop" + } + } + }, + "switch": { + "bubble_soak": { + "default": "mdi:water-off", + "state": { + "on": "mdi:water" + } + }, + "wrinkle_prevent": { + "default": "mdi:tumble-dryer", + "state": { + "off": "mdi:tumble-dryer-off" + } + }, 
+ "ice_maker": { + "default": "mdi:delete-variant" + } + } + } +} diff --git a/homeassistant/components/smartthings/light.py b/homeassistant/components/smartthings/light.py index eee333f131f..1ad315bcd97 100644 --- a/homeassistant/components/smartthings/light.py +++ b/homeassistant/components/smartthings/light.py @@ -41,7 +41,7 @@ async def async_setup_entry( """Add lights for a config entry.""" entry_data = entry.runtime_data async_add_entities( - SmartThingsLight(entry_data.client, entry_data.rooms, device) + SmartThingsLight(entry_data.client, device) for device in entry_data.devices.values() if Capability.SWITCH in device.status[MAIN] and any(capability in device.status[MAIN] for capability in CAPABILITIES) @@ -71,14 +71,11 @@ class SmartThingsLight(SmartThingsEntity, LightEntity, RestoreEntity): # highest kelvin found supported across 20+ handlers. _attr_max_color_temp_kelvin = 9000 # 111 mireds - def __init__( - self, client: SmartThings, rooms: dict[str, str], device: FullDevice - ) -> None: + def __init__(self, client: SmartThings, device: FullDevice) -> None: """Initialize a SmartThingsLight.""" super().__init__( client, device, - rooms, { Capability.COLOR_CONTROL, Capability.COLOR_TEMPERATURE, @@ -150,14 +147,21 @@ class SmartThingsLight(SmartThingsEntity, LightEntity, RestoreEntity): """Update entity attributes when the device status has changed.""" # Brightness and transition if brightness_supported(self._attr_supported_color_modes): - self._attr_brightness = int( - convert_scale( - self.get_attribute_value(Capability.SWITCH_LEVEL, Attribute.LEVEL), - 100, - 255, - 0, + if ( + brightness := self.get_attribute_value( + Capability.SWITCH_LEVEL, Attribute.LEVEL + ) + ) is None: + self._attr_brightness = None + else: + self._attr_brightness = int( + convert_scale( + brightness, + 100, + 255, + 0, + ) ) - ) # Color Temperature if ColorMode.COLOR_TEMP in self._attr_supported_color_modes: self._attr_color_temp_kelvin = self.get_attribute_value( @@ -165,16 +169,21 @@ class SmartThingsLight(SmartThingsEntity, LightEntity, RestoreEntity): ) # Color if ColorMode.HS in self._attr_supported_color_modes: - self._attr_hs_color = ( - convert_scale( - self.get_attribute_value(Capability.COLOR_CONTROL, Attribute.HUE), - 100, - 360, - ), - self.get_attribute_value( - Capability.COLOR_CONTROL, Attribute.SATURATION - ), - ) + if ( + hue := self.get_attribute_value(Capability.COLOR_CONTROL, Attribute.HUE) + ) is None: + self._attr_hs_color = None + else: + self._attr_hs_color = ( + convert_scale( + hue, + 100, + 360, + ), + self.get_attribute_value( + Capability.COLOR_CONTROL, Attribute.SATURATION + ), + ) async def async_set_color(self, hs_color): """Set the color of the device.""" @@ -220,6 +229,10 @@ class SmartThingsLight(SmartThingsEntity, LightEntity, RestoreEntity): super()._update_handler(event) @property - def is_on(self) -> bool: + def is_on(self) -> bool | None: """Return true if light is on.""" - return self.get_attribute_value(Capability.SWITCH, Attribute.SWITCH) == "on" + if ( + state := self.get_attribute_value(Capability.SWITCH, Attribute.SWITCH) + ) is None: + return None + return state == "on" diff --git a/homeassistant/components/smartthings/lock.py b/homeassistant/components/smartthings/lock.py index 76a643e417e..f56ecd5d565 100644 --- a/homeassistant/components/smartthings/lock.py +++ b/homeassistant/components/smartthings/lock.py @@ -33,7 +33,7 @@ async def async_setup_entry( """Add locks for a config entry.""" entry_data = entry.runtime_data async_add_entities( - 
SmartThingsLock(entry_data.client, device, entry_data.rooms, {Capability.LOCK}) + SmartThingsLock(entry_data.client, device, {Capability.LOCK}) for device in entry_data.devices.values() if Capability.LOCK in device.status[MAIN] ) diff --git a/homeassistant/components/smartthings/manifest.json b/homeassistant/components/smartthings/manifest.json index 2a4e79bff58..49de0c79ce7 100644 --- a/homeassistant/components/smartthings/manifest.json +++ b/homeassistant/components/smartthings/manifest.json @@ -29,5 +29,6 @@ "documentation": "https://www.home-assistant.io/integrations/smartthings", "iot_class": "cloud_push", "loggers": ["pysmartthings"], - "requirements": ["pysmartthings==2.7.0"] + "quality_scale": "bronze", + "requirements": ["pysmartthings==3.0.0"] } diff --git a/homeassistant/components/smartthings/media_player.py b/homeassistant/components/smartthings/media_player.py new file mode 100644 index 00000000000..9a676d2efb6 --- /dev/null +++ b/homeassistant/components/smartthings/media_player.py @@ -0,0 +1,355 @@ +"""Support for media players through the SmartThings cloud API.""" + +from __future__ import annotations + +from typing import Any + +from pysmartthings import Attribute, Capability, Category, Command, SmartThings + +from homeassistant.components.media_player import ( + MediaPlayerDeviceClass, + MediaPlayerEntity, + MediaPlayerEntityFeature, + MediaPlayerState, + RepeatMode, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . import FullDevice, SmartThingsConfigEntry +from .const import MAIN +from .entity import SmartThingsEntity + +MEDIA_PLAYER_CAPABILITIES = ( + Capability.AUDIO_MUTE, + Capability.AUDIO_VOLUME, + Capability.MEDIA_PLAYBACK, +) + +CONTROLLABLE_SOURCES = ["bluetooth", "wifi"] + +DEVICE_CLASS_MAP: dict[Category | str, MediaPlayerDeviceClass] = { + Category.NETWORK_AUDIO: MediaPlayerDeviceClass.SPEAKER, + Category.SPEAKER: MediaPlayerDeviceClass.SPEAKER, + Category.TELEVISION: MediaPlayerDeviceClass.TV, + Category.RECEIVER: MediaPlayerDeviceClass.RECEIVER, +} + +VALUE_TO_STATE = { + "buffering": MediaPlayerState.BUFFERING, + "paused": MediaPlayerState.PAUSED, + "playing": MediaPlayerState.PLAYING, + "stopped": MediaPlayerState.IDLE, + "fast forwarding": MediaPlayerState.BUFFERING, + "rewinding": MediaPlayerState.BUFFERING, +} + +REPEAT_MODE_TO_HA = { + "all": RepeatMode.ALL, + "one": RepeatMode.ONE, + "off": RepeatMode.OFF, +} + +HA_REPEAT_MODE_TO_SMARTTHINGS = {v: k for k, v in REPEAT_MODE_TO_HA.items()} + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SmartThingsConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Add media players for a config entry.""" + entry_data = entry.runtime_data + + async_add_entities( + SmartThingsMediaPlayer(entry_data.client, device) + for device in entry_data.devices.values() + if all( + capability in device.status[MAIN] + for capability in MEDIA_PLAYER_CAPABILITIES + ) + ) + + +class SmartThingsMediaPlayer(SmartThingsEntity, MediaPlayerEntity): + """Define a SmartThings media player.""" + + _attr_name = None + + def __init__(self, client: SmartThings, device: FullDevice) -> None: + """Initialize the media_player class.""" + super().__init__( + client, + device, + { + Capability.AUDIO_MUTE, + Capability.AUDIO_TRACK_DATA, + Capability.AUDIO_VOLUME, + Capability.MEDIA_INPUT_SOURCE, + Capability.MEDIA_PLAYBACK, + Capability.MEDIA_PLAYBACK_REPEAT, + Capability.MEDIA_PLAYBACK_SHUFFLE, + 
Capability.SAMSUNG_VD_AUDIO_INPUT_SOURCE, + Capability.SWITCH, + }, + ) + self._attr_supported_features = self._determine_features() + self._attr_device_class = DEVICE_CLASS_MAP.get( + device.device.components[MAIN].user_category + or device.device.components[MAIN].manufacturer_category, + ) + + def _determine_features(self) -> MediaPlayerEntityFeature: + flags = MediaPlayerEntityFeature(0) + playback_commands = self.get_attribute_value( + Capability.MEDIA_PLAYBACK, Attribute.SUPPORTED_PLAYBACK_COMMANDS + ) + if "play" in playback_commands: + flags |= MediaPlayerEntityFeature.PLAY + if "pause" in playback_commands: + flags |= MediaPlayerEntityFeature.PAUSE + if "stop" in playback_commands: + flags |= MediaPlayerEntityFeature.STOP + if "rewind" in playback_commands: + flags |= MediaPlayerEntityFeature.PREVIOUS_TRACK + if "fastForward" in playback_commands: + flags |= MediaPlayerEntityFeature.NEXT_TRACK + if self.supports_capability(Capability.AUDIO_VOLUME): + flags |= ( + MediaPlayerEntityFeature.VOLUME_SET + | MediaPlayerEntityFeature.VOLUME_STEP + ) + if self.supports_capability(Capability.AUDIO_MUTE): + flags |= MediaPlayerEntityFeature.VOLUME_MUTE + if self.supports_capability(Capability.SWITCH): + flags |= ( + MediaPlayerEntityFeature.TURN_ON | MediaPlayerEntityFeature.TURN_OFF + ) + if self.supports_capability(Capability.MEDIA_INPUT_SOURCE): + flags |= MediaPlayerEntityFeature.SELECT_SOURCE + if self.supports_capability(Capability.MEDIA_PLAYBACK_SHUFFLE): + flags |= MediaPlayerEntityFeature.SHUFFLE_SET + if self.supports_capability(Capability.MEDIA_PLAYBACK_REPEAT): + flags |= MediaPlayerEntityFeature.REPEAT_SET + return flags + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the media player off.""" + await self.execute_device_command( + Capability.SWITCH, + Command.OFF, + ) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the media player on.""" + await self.execute_device_command( + Capability.SWITCH, + Command.ON, + ) + + async def async_mute_volume(self, mute: bool) -> None: + """Mute volume.""" + await self.execute_device_command( + Capability.AUDIO_MUTE, + Command.SET_MUTE, + argument="muted" if mute else "unmuted", + ) + + async def async_set_volume_level(self, volume: float) -> None: + """Set volume level.""" + await self.execute_device_command( + Capability.AUDIO_VOLUME, + Command.SET_VOLUME, + argument=int(volume * 100), + ) + + async def async_volume_up(self) -> None: + """Increase volume.""" + await self.execute_device_command( + Capability.AUDIO_VOLUME, + Command.VOLUME_UP, + ) + + async def async_volume_down(self) -> None: + """Decrease volume.""" + await self.execute_device_command( + Capability.AUDIO_VOLUME, + Command.VOLUME_DOWN, + ) + + async def async_media_play(self) -> None: + """Play media.""" + await self.execute_device_command( + Capability.MEDIA_PLAYBACK, + Command.PLAY, + ) + + async def async_media_pause(self) -> None: + """Pause media.""" + await self.execute_device_command( + Capability.MEDIA_PLAYBACK, + Command.PAUSE, + ) + + async def async_media_stop(self) -> None: + """Stop media.""" + await self.execute_device_command( + Capability.MEDIA_PLAYBACK, + Command.STOP, + ) + + async def async_media_previous_track(self) -> None: + """Previous track.""" + await self.execute_device_command( + Capability.MEDIA_PLAYBACK, + Command.REWIND, + ) + + async def async_media_next_track(self) -> None: + """Next track.""" + await self.execute_device_command( + Capability.MEDIA_PLAYBACK, + Command.FAST_FORWARD, + ) + + async def 
async_select_source(self, source: str) -> None: + """Select source.""" + await self.execute_device_command( + Capability.MEDIA_INPUT_SOURCE, + Command.SET_INPUT_SOURCE, + argument=source, + ) + + async def async_set_shuffle(self, shuffle: bool) -> None: + """Set shuffle mode.""" + await self.execute_device_command( + Capability.MEDIA_PLAYBACK_SHUFFLE, + Command.SET_PLAYBACK_SHUFFLE, + argument="enabled" if shuffle else "disabled", + ) + + async def async_set_repeat(self, repeat: RepeatMode) -> None: + """Set repeat mode.""" + await self.execute_device_command( + Capability.MEDIA_PLAYBACK_REPEAT, + Command.SET_PLAYBACK_REPEAT_MODE, + argument=HA_REPEAT_MODE_TO_SMARTTHINGS[repeat], + ) + + @property + def media_title(self) -> str | None: + """Title of current playing media.""" + if ( + not self.supports_capability(Capability.AUDIO_TRACK_DATA) + or ( + track_data := self.get_attribute_value( + Capability.AUDIO_TRACK_DATA, Attribute.AUDIO_TRACK_DATA + ) + ) + is None + ): + return None + return track_data.get("title", None) + + @property + def media_artist(self) -> str | None: + """Artist of current playing media.""" + if ( + not self.supports_capability(Capability.AUDIO_TRACK_DATA) + or ( + track_data := self.get_attribute_value( + Capability.AUDIO_TRACK_DATA, Attribute.AUDIO_TRACK_DATA + ) + ) + is None + ): + return None + return track_data.get("artist") + + @property + def state(self) -> MediaPlayerState | None: + """State of the media player.""" + if self.supports_capability(Capability.SWITCH): + if self.get_attribute_value(Capability.SWITCH, Attribute.SWITCH) == "on": + if ( + self.source is not None + and self.source in CONTROLLABLE_SOURCES + and self.get_attribute_value( + Capability.MEDIA_PLAYBACK, Attribute.PLAYBACK_STATUS + ) + in VALUE_TO_STATE + ): + return VALUE_TO_STATE[ + self.get_attribute_value( + Capability.MEDIA_PLAYBACK, Attribute.PLAYBACK_STATUS + ) + ] + return MediaPlayerState.ON + return MediaPlayerState.OFF + return VALUE_TO_STATE[ + self.get_attribute_value( + Capability.MEDIA_PLAYBACK, Attribute.PLAYBACK_STATUS + ) + ] + + @property + def is_volume_muted(self) -> bool: + """Returns if the volume is muted.""" + return ( + self.get_attribute_value(Capability.AUDIO_MUTE, Attribute.MUTE) == "muted" + ) + + @property + def volume_level(self) -> float: + """Volume level.""" + return self.get_attribute_value(Capability.AUDIO_VOLUME, Attribute.VOLUME) / 100 + + @property + def source(self) -> str | None: + """Input source.""" + if self.supports_capability(Capability.MEDIA_INPUT_SOURCE): + return self.get_attribute_value( + Capability.MEDIA_INPUT_SOURCE, Attribute.INPUT_SOURCE + ) + if self.supports_capability(Capability.SAMSUNG_VD_AUDIO_INPUT_SOURCE): + return self.get_attribute_value( + Capability.SAMSUNG_VD_AUDIO_INPUT_SOURCE, Attribute.INPUT_SOURCE + ) + return None + + @property + def source_list(self) -> list[str] | None: + """List of input sources.""" + if self.supports_capability(Capability.MEDIA_INPUT_SOURCE): + return self.get_attribute_value( + Capability.MEDIA_INPUT_SOURCE, Attribute.SUPPORTED_INPUT_SOURCES + ) + if self.supports_capability(Capability.SAMSUNG_VD_AUDIO_INPUT_SOURCE): + return self.get_attribute_value( + Capability.SAMSUNG_VD_AUDIO_INPUT_SOURCE, + Attribute.SUPPORTED_INPUT_SOURCES, + ) + return None + + @property + def shuffle(self) -> bool | None: + """Returns if shuffle mode is set.""" + if self.supports_capability(Capability.MEDIA_PLAYBACK_SHUFFLE): + return ( + self.get_attribute_value( + Capability.MEDIA_PLAYBACK_SHUFFLE, 
Attribute.PLAYBACK_SHUFFLE + ) + == "enabled" + ) + return None + + @property + def repeat(self) -> RepeatMode | None: + """Returns if repeat mode is set.""" + if self.supports_capability(Capability.MEDIA_PLAYBACK_REPEAT): + return REPEAT_MODE_TO_HA[ + self.get_attribute_value( + Capability.MEDIA_PLAYBACK_REPEAT, Attribute.PLAYBACK_REPEAT_MODE + ) + ] + return None diff --git a/homeassistant/components/smartthings/number.py b/homeassistant/components/smartthings/number.py new file mode 100644 index 00000000000..bb21520e271 --- /dev/null +++ b/homeassistant/components/smartthings/number.py @@ -0,0 +1,75 @@ +"""Support for number entities through the SmartThings cloud API.""" + +from __future__ import annotations + +from pysmartthings import Attribute, Capability, Command, SmartThings + +from homeassistant.components.number import NumberEntity +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . import FullDevice, SmartThingsConfigEntry +from .const import MAIN +from .entity import SmartThingsEntity + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SmartThingsConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Add number entities for a config entry.""" + entry_data = entry.runtime_data + async_add_entities( + SmartThingsWasherRinseCyclesNumberEntity(entry_data.client, device) + for device in entry_data.devices.values() + if Capability.CUSTOM_WASHER_RINSE_CYCLES in device.status[MAIN] + ) + + +class SmartThingsWasherRinseCyclesNumberEntity(SmartThingsEntity, NumberEntity): + """Define a SmartThings number.""" + + _attr_translation_key = "washer_rinse_cycles" + _attr_native_step = 1.0 + + def __init__(self, client: SmartThings, device: FullDevice) -> None: + """Initialize the instance.""" + super().__init__(client, device, {Capability.CUSTOM_WASHER_RINSE_CYCLES}) + self._attr_unique_id = f"{device.device.device_id}_{MAIN}_{Capability.CUSTOM_WASHER_RINSE_CYCLES}_{Attribute.WASHER_RINSE_CYCLES}_{Attribute.WASHER_RINSE_CYCLES}" + + @property + def options(self) -> list[int]: + """Return the list of options.""" + values = self.get_attribute_value( + Capability.CUSTOM_WASHER_RINSE_CYCLES, + Attribute.SUPPORTED_WASHER_RINSE_CYCLES, + ) + return [int(value) for value in values] if values else [] + + @property + def native_value(self) -> float | None: + """Return the current value.""" + return int( + self.get_attribute_value( + Capability.CUSTOM_WASHER_RINSE_CYCLES, Attribute.WASHER_RINSE_CYCLES + ) + ) + + @property + def native_min_value(self) -> float: + """Return the minimum value.""" + return min(self.options) + + @property + def native_max_value(self) -> float: + """Return the maximum value.""" + return max(self.options) + + async def async_set_native_value(self, value: float) -> None: + """Set the value.""" + await self.execute_device_command( + Capability.CUSTOM_WASHER_RINSE_CYCLES, + Command.SET_WASHER_RINSE_CYCLES, + str(int(value)), + ) diff --git a/homeassistant/components/smartthings/quality_scale.yaml b/homeassistant/components/smartthings/quality_scale.yaml new file mode 100644 index 00000000000..be8a9039617 --- /dev/null +++ b/homeassistant/components/smartthings/quality_scale.yaml @@ -0,0 +1,80 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + This integration does not provide additional actions. + appropriate-polling: + status: exempt + comment: | + This integration works via push. 
+  brands: done
+  common-modules: done
+  config-flow-test-coverage: done
+  config-flow: done
+  dependency-transparency: done
+  docs-actions:
+    status: exempt
+    comment: |
+      This integration does not provide additional actions.
+  docs-high-level-description: done
+  docs-installation-instructions: done
+  docs-removal-instructions: done
+  entity-event-setup: done
+  entity-unique-id: done
+  has-entity-name: done
+  runtime-data: done
+  test-before-configure: done
+  test-before-setup: done
+  unique-config-entry: done
+
+  # Silver
+  action-exceptions: todo
+  config-entry-unloading: done
+  docs-configuration-parameters:
+    status: exempt
+    comment: No options to configure
+  docs-installation-parameters:
+    status: exempt
+    comment: No parameters needed during installation
+  entity-unavailable: todo
+  integration-owner: done
+  log-when-unavailable: todo
+  parallel-updates: todo
+  reauthentication-flow: done
+  test-coverage: todo
+  # Gold
+  devices: done
+  diagnostics: done
+  discovery-update-info:
+    status: exempt
+    comment: |
+      This integration connects via the cloud.
+  discovery: done
+  docs-data-update: todo
+  docs-examples: todo
+  docs-known-limitations: todo
+  docs-supported-devices: todo
+  docs-supported-functions: done
+  docs-troubleshooting: todo
+  docs-use-cases: todo
+  dynamic-devices: todo
+  entity-category: done
+  entity-device-class: done
+  entity-disabled-by-default:
+    status: exempt
+    comment: |
+      This integration does not have any entities that are disabled by default.
+  entity-translations: done
+  exception-translations: todo
+  icon-translations: todo
+  reconfiguration-flow: todo
+  repair-issues:
+    status: exempt
+    comment: |
+      This integration doesn't have any cases where raising an issue is needed.
+  stale-devices: done
+  # Platinum
+  async-dependency: done
+  inject-websession: done
+  strict-typing: todo
diff --git a/homeassistant/components/smartthings/select.py b/homeassistant/components/smartthings/select.py
new file mode 100644
index 00000000000..f0a483b1329
--- /dev/null
+++ b/homeassistant/components/smartthings/select.py
@@ -0,0 +1,127 @@
+"""Support for select entities through the SmartThings cloud API."""
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+
+from pysmartthings import Attribute, Capability, Command, SmartThings
+
+from homeassistant.components.select import SelectEntity, SelectEntityDescription
+from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import ServiceValidationError
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
+
+from .
import FullDevice, SmartThingsConfigEntry +from .const import MAIN +from .entity import SmartThingsEntity + + +@dataclass(frozen=True, kw_only=True) +class SmartThingsSelectDescription(SelectEntityDescription): + """Class describing SmartThings select entities.""" + + key: Capability + requires_remote_control_status: bool + options_attribute: Attribute + status_attribute: Attribute + command: Command + + +CAPABILITIES_TO_SELECT: dict[Capability | str, SmartThingsSelectDescription] = { + Capability.DISHWASHER_OPERATING_STATE: SmartThingsSelectDescription( + key=Capability.DISHWASHER_OPERATING_STATE, + name=None, + translation_key="operating_state", + requires_remote_control_status=True, + options_attribute=Attribute.SUPPORTED_MACHINE_STATES, + status_attribute=Attribute.MACHINE_STATE, + command=Command.SET_MACHINE_STATE, + ), + Capability.DRYER_OPERATING_STATE: SmartThingsSelectDescription( + key=Capability.DRYER_OPERATING_STATE, + name=None, + translation_key="operating_state", + requires_remote_control_status=True, + options_attribute=Attribute.SUPPORTED_MACHINE_STATES, + status_attribute=Attribute.MACHINE_STATE, + command=Command.SET_MACHINE_STATE, + ), + Capability.WASHER_OPERATING_STATE: SmartThingsSelectDescription( + key=Capability.WASHER_OPERATING_STATE, + name=None, + translation_key="operating_state", + requires_remote_control_status=True, + options_attribute=Attribute.SUPPORTED_MACHINE_STATES, + status_attribute=Attribute.MACHINE_STATE, + command=Command.SET_MACHINE_STATE, + ), +} + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SmartThingsConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Add select entities for a config entry.""" + entry_data = entry.runtime_data + async_add_entities( + SmartThingsSelectEntity( + entry_data.client, device, CAPABILITIES_TO_SELECT[capability] + ) + for device in entry_data.devices.values() + for capability in device.status[MAIN] + if capability in CAPABILITIES_TO_SELECT + ) + + +class SmartThingsSelectEntity(SmartThingsEntity, SelectEntity): + """Define a SmartThings select.""" + + entity_description: SmartThingsSelectDescription + + def __init__( + self, + client: SmartThings, + device: FullDevice, + entity_description: SmartThingsSelectDescription, + ) -> None: + """Initialize the instance.""" + capabilities = {entity_description.key} + if entity_description.requires_remote_control_status: + capabilities.add(Capability.REMOTE_CONTROL_STATUS) + super().__init__(client, device, capabilities) + self.entity_description = entity_description + self._attr_unique_id = f"{device.device.device_id}_{MAIN}_{entity_description.key}_{entity_description.status_attribute}_{entity_description.status_attribute}" + + @property + def options(self) -> list[str]: + """Return the list of options.""" + return self.get_attribute_value( + self.entity_description.key, self.entity_description.options_attribute + ) + + @property + def current_option(self) -> str | None: + """Return the current option.""" + return self.get_attribute_value( + self.entity_description.key, self.entity_description.status_attribute + ) + + async def async_select_option(self, option: str) -> None: + """Select an option.""" + if ( + self.entity_description.requires_remote_control_status + and self.get_attribute_value( + Capability.REMOTE_CONTROL_STATUS, Attribute.REMOTE_CONTROL_ENABLED + ) + == "false" + ): + raise ServiceValidationError( + "Can only be updated when remote control is enabled" + ) + await self.execute_device_command( + 
self.entity_description.key, + self.entity_description.command, + option, + ) diff --git a/homeassistant/components/smartthings/sensor.py b/homeassistant/components/smartthings/sensor.py index 5a2fdcf3854..f93b27337e1 100644 --- a/homeassistant/components/smartthings/sensor.py +++ b/homeassistant/components/smartthings/sensor.py @@ -5,10 +5,12 @@ from __future__ import annotations from collections.abc import Callable, Mapping from dataclasses import dataclass from datetime import datetime -from typing import Any +from typing import Any, cast -from pysmartthings import Attribute, Capability, SmartThings +from pysmartthings import Attribute, Capability, ComponentStatus, SmartThings, Status +from homeassistant.components.automation import automations_with_entity +from homeassistant.components.script import scripts_with_entity from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, @@ -29,11 +31,17 @@ from homeassistant.const import ( UnitOfVolume, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback +from homeassistant.helpers.issue_registry import ( + IssueSeverity, + async_create_issue, + async_delete_issue, +) from homeassistant.util import dt as dt_util from . import FullDevice, SmartThingsConfigEntry -from .const import MAIN +from .const import DOMAIN, MAIN from .entity import SmartThingsEntity THERMOSTAT_CAPABILITIES = { @@ -128,9 +136,11 @@ class SmartThingsSensorEntityDescription(SensorEntityDescription): value_fn: Callable[[Any], str | float | int | datetime | None] = lambda value: value extra_state_attributes_fn: Callable[[Any], dict[str, Any]] | None = None - unique_id_separator: str = "." 
capability_ignore_list: list[set[Capability]] | None = None options_attribute: Attribute | None = None + exists_fn: Callable[[Status], bool] | None = None + use_temperature_unit: bool = False + deprecated: Callable[[ComponentStatus], str | None] | None = None CAPABILITY_TO_SENSORS: dict[ @@ -187,6 +197,17 @@ CAPABILITY_TO_SENSORS: dict[ key=Attribute.VOLUME, translation_key="audio_volume", native_unit_of_measurement=PERCENTAGE, + deprecated=( + lambda status: "media_player" + if all( + capability in status + for capability in ( + Capability.AUDIO_MUTE, + Capability.MEDIA_PLAYBACK, + ) + ) + else None + ), ) ] }, @@ -223,7 +244,6 @@ CAPABILITY_TO_SENSORS: dict[ ) ] }, - # Haven't seen at devices yet Capability.CARBON_DIOXIDE_MEASUREMENT: { Attribute.CARBON_DIOXIDE: [ SmartThingsSensorEntityDescription( @@ -311,6 +331,7 @@ CAPABILITY_TO_SENSORS: dict[ translation_key="dryer_machine_state", options=WASHER_OPTIONS, device_class=SensorDeviceClass.ENUM, + deprecated=lambda _: "machine_state", ) ], Attribute.DRYER_JOB_STATE: [ @@ -462,24 +483,25 @@ CAPABILITY_TO_SENSORS: dict[ device_class=SensorDeviceClass.ENUM, options_attribute=Attribute.SUPPORTED_INPUT_SOURCES, value_fn=lambda value: value.lower() if value else None, + deprecated=lambda _: "media_player", ) ] }, - # part of the proposed spec, Haven't seen at devices yet Capability.MEDIA_PLAYBACK_REPEAT: { Attribute.PLAYBACK_REPEAT_MODE: [ SmartThingsSensorEntityDescription( key=Attribute.PLAYBACK_REPEAT_MODE, translation_key="media_playback_repeat", + deprecated=lambda _: "media_player", ) ] }, - # part of the proposed spec, Haven't seen at devices yet Capability.MEDIA_PLAYBACK_SHUFFLE: { Attribute.PLAYBACK_SHUFFLE: [ SmartThingsSensorEntityDescription( key=Attribute.PLAYBACK_SHUFFLE, translation_key="media_playback_shuffle", + deprecated=lambda _: "media_player", ) ] }, @@ -498,6 +520,7 @@ CAPABILITY_TO_SENSORS: dict[ ], device_class=SensorDeviceClass.ENUM, value_fn=lambda value: MEDIA_PLAYBACK_STATE_MAP.get(value, value), + deprecated=lambda _: "media_player", ) ] }, @@ -571,6 +594,10 @@ CAPABILITY_TO_SENSORS: dict[ SmartThingsSensorEntityDescription( key=Attribute.OVEN_SETPOINT, translation_key="oven_setpoint", + device_class=SensorDeviceClass.TEMPERATURE, + use_temperature_unit=True, + # Set the value to None if it is 0 F (-17 C) + value_fn=lambda value: None if value in {0, -17} else value, ) ] }, @@ -583,6 +610,10 @@ CAPABILITY_TO_SENSORS: dict[ native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, value_fn=lambda value: value["energy"] / 1000, suggested_display_precision=2, + exists_fn=lambda status: ( + (value := cast(dict | None, status.value)) is not None + and "energy" in value + ), ), SmartThingsSensorEntityDescription( key="power_meter", @@ -592,6 +623,10 @@ CAPABILITY_TO_SENSORS: dict[ value_fn=lambda value: value["power"], extra_state_attributes_fn=power_attributes, suggested_display_precision=2, + exists_fn=lambda status: ( + (value := cast(dict | None, status.value)) is not None + and "power" in value + ), ), SmartThingsSensorEntityDescription( key="deltaEnergy_meter", @@ -601,6 +636,10 @@ CAPABILITY_TO_SENSORS: dict[ native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, value_fn=lambda value: value["deltaEnergy"] / 1000, suggested_display_precision=2, + exists_fn=lambda status: ( + (value := cast(dict | None, status.value)) is not None + and "deltaEnergy" in value + ), ), SmartThingsSensorEntityDescription( key="powerEnergy_meter", @@ -610,6 +649,10 @@ CAPABILITY_TO_SENSORS: dict[ 
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, value_fn=lambda value: value["powerEnergy"] / 1000, suggested_display_precision=2, + exists_fn=lambda status: ( + (value := cast(dict | None, status.value)) is not None + and "powerEnergy" in value + ), ), SmartThingsSensorEntityDescription( key="energySaved_meter", @@ -619,6 +662,10 @@ CAPABILITY_TO_SENSORS: dict[ native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, value_fn=lambda value: value["energySaved"] / 1000, suggested_display_precision=2, + exists_fn=lambda status: ( + (value := cast(dict | None, status.value)) is not None + and "energySaved" in value + ), ), ] }, @@ -652,6 +699,15 @@ CAPABILITY_TO_SENSORS: dict[ ) ] }, + Capability.RELATIVE_BRIGHTNESS: { + Attribute.BRIGHTNESS_INTENSITY: [ + SmartThingsSensorEntityDescription( + key=Attribute.BRIGHTNESS_INTENSITY, + translation_key="brightness_intensity", + state_class=SensorStateClass.MEASUREMENT, + ) + ] + }, Capability.RELATIVE_HUMIDITY_MEASUREMENT: { Attribute.HUMIDITY: [ SmartThingsSensorEntityDescription( @@ -823,21 +879,18 @@ CAPABILITY_TO_SENSORS: dict[ Capability.THREE_AXIS: { Attribute.THREE_AXIS: [ SmartThingsSensorEntityDescription( - key="X Coordinate", + key="x_coordinate", translation_key="x_coordinate", - unique_id_separator=" ", value_fn=lambda value: value[0], ), SmartThingsSensorEntityDescription( - key="Y Coordinate", + key="y_coordinate", translation_key="y_coordinate", - unique_id_separator=" ", value_fn=lambda value: value[1], ), SmartThingsSensorEntityDescription( - key="Z Coordinate", + key="z_coordinate", translation_key="z_coordinate", - unique_id_separator=" ", value_fn=lambda value: value[2], ), ] @@ -877,6 +930,16 @@ CAPABILITY_TO_SENSORS: dict[ ) ] }, + Capability.VERY_FINE_DUST_SENSOR: { + Attribute.VERY_FINE_DUST_LEVEL: [ + SmartThingsSensorEntityDescription( + key=Attribute.VERY_FINE_DUST_LEVEL, + native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, + device_class=SensorDeviceClass.PM1, + state_class=SensorStateClass.MEASUREMENT, + ) + ] + }, Capability.VOLTAGE_MEASUREMENT: { Attribute.VOLTAGE: [ SmartThingsSensorEntityDescription( @@ -903,6 +966,7 @@ CAPABILITY_TO_SENSORS: dict[ translation_key="washer_machine_state", options=WASHER_OPTIONS, device_class=SensorDeviceClass.ENUM, + deprecated=lambda _: "machine_state", ) ], Attribute.WASHER_JOB_STATE: [ @@ -964,7 +1028,6 @@ async def async_setup_entry( entry_data.client, device, description, - entry_data.rooms, capability, attribute, ) @@ -980,6 +1043,10 @@ async def async_setup_entry( for capability_list in description.capability_ignore_list ) ) + and ( + not description.exists_fn + or description.exists_fn(device.status[MAIN][capability][attribute]) + ) ) @@ -993,13 +1060,15 @@ class SmartThingsSensor(SmartThingsEntity, SensorEntity): client: SmartThings, device: FullDevice, entity_description: SmartThingsSensorEntityDescription, - rooms: dict[str, str], capability: Capability, attribute: Attribute, ) -> None: """Init the class.""" - super().__init__(client, device, rooms, {capability}) - self._attr_unique_id = f"{device.device.device_id}{entity_description.unique_id_separator}{entity_description.key}" + capabilities_to_subscribe = {capability} + if entity_description.use_temperature_unit: + capabilities_to_subscribe.add(Capability.TEMPERATURE_MEASUREMENT) + super().__init__(client, device, capabilities_to_subscribe) + self._attr_unique_id = f"{device.device.device_id}_{MAIN}_{capability}_{attribute}_{entity_description.key}" self._attribute = attribute self.capability = 
capability
         self.entity_description = entity_description
@@ -1013,7 +1082,12 @@ class SmartThingsSensor(SmartThingsEntity, SensorEntity):
     @property
     def native_unit_of_measurement(self) -> str | None:
         """Return the unit this state is expressed in."""
-        unit = self._internal_state[self.capability][self._attribute].unit
+        if self.entity_description.use_temperature_unit:
+            unit = self._internal_state[Capability.TEMPERATURE_MEASUREMENT][
+                Attribute.TEMPERATURE
+            ].unit
+        else:
+            unit = self._internal_state[self.capability][self._attribute].unit
         return (
             UNITS.get(unit, unit)
             if unit
@@ -1041,3 +1115,53 @@ class SmartThingsSensor(SmartThingsEntity, SensorEntity):
                 return []
             return [option.lower() for option in options]
         return super().options
+
+    async def async_added_to_hass(self) -> None:
+        """Call when entity is added to hass."""
+        await super().async_added_to_hass()
+        if (
+            not self.entity_description.deprecated
+            or (reason := self.entity_description.deprecated(self.device.status[MAIN]))
+            is None
+        ):
+            return
+        automations = automations_with_entity(self.hass, self.entity_id)
+        scripts = scripts_with_entity(self.hass, self.entity_id)
+        if not automations and not scripts:
+            return
+
+        entity_reg: er.EntityRegistry = er.async_get(self.hass)
+        items_list = [
+            f"- [{item.original_name}](/config/{integration}/edit/{item.unique_id})"
+            for integration, entities in (
+                ("automation", automations),
+                ("script", scripts),
+            )
+            for entity_id in entities
+            if (item := entity_reg.async_get(entity_id))
+        ]
+
+        async_create_issue(
+            self.hass,
+            DOMAIN,
+            f"deprecated_{reason}_{self.entity_id}",
+            breaks_in_ha_version="2025.10.0",
+            is_fixable=False,
+            severity=IssueSeverity.WARNING,
+            translation_key=f"deprecated_{reason}",
+            translation_placeholders={
+                "entity": self.entity_id,
+                "items": "\n".join(items_list),
+            },
+        )
+
+    async def async_will_remove_from_hass(self) -> None:
+        """Call when entity will be removed from hass."""
+        await super().async_will_remove_from_hass()
+        if (
+            not self.entity_description.deprecated
+            or (reason := self.entity_description.deprecated(self.device.status[MAIN]))
+            is None
+        ):
+            return
+        async_delete_issue(self.hass, DOMAIN, f"deprecated_{reason}_{self.entity_id}")
diff --git a/homeassistant/components/smartthings/strings.json b/homeassistant/components/smartthings/strings.json
index 844ebd12004..e4cf03178fd 100644
--- a/homeassistant/components/smartthings/strings.json
+++ b/homeassistant/components/smartthings/strings.json
@@ -33,13 +33,86 @@
       "acceleration": {
         "name": "Acceleration"
       },
+      "door": {
+        "name": "[%key:component::binary_sensor::entity_component::door::name%]"
+      },
+      "dryer_wrinkle_prevent_active": {
+        "name": "Wrinkle prevent active"
+      },
       "filter_status": {
         "name": "Filter status"
       },
+      "freezer_door": {
+        "name": "Freezer door"
+      },
+      "cooler_door": {
+        "name": "Cooler door"
+      },
+      "remote_control": {
+        "name": "Remote control"
+      },
+      "child_lock": {
+        "name": "Child lock"
+      },
       "valve": {
         "name": "Valve"
       }
     },
+    "button": {
+      "reset_water_filter": {
+        "name": "Reset water filter"
+      },
+      "stop": {
+        "name": "[%key:common::action::stop%]"
+      }
+    },
+    "event": {
+      "button": {
+        "state": {
+          "pushed": "Pushed",
+          "held": "Held",
+          "double": "Double",
+          "pushed_2x": "Pushed 2x",
+          "pushed_3x": "Pushed 3x",
+          "pushed_4x": "Pushed 4x",
+          "pushed_5x": "Pushed 5x",
+          "pushed_6x": "Pushed 6x",
+          "down": "Down",
+          "down_2x": "Down 2x",
+          "down_3x": "Down 3x",
+          "down_4x": "Down 4x",
+          "down_5x": "Down 5x",
+          "down_6x": "Down 6x",
+          "down_hold": "Down hold",
+          "up": "Up",
+          "up_2x": "Up 2x",
+          "up_3x": "Up 3x",
+          "up_4x": "Up 4x",
+          "up_5x": "Up 5x",
+          "up_6x": "Up 6x",
+          "up_hold": "Up hold",
+          "swipe_up": "Swipe up",
+          "swipe_down": "Swipe down",
+          "swipe_left": "Swipe left",
+          "swipe_right": "Swipe right"
+        }
+      }
+    },
+    "number": {
+      "washer_rinse_cycles": {
+        "name": "Rinse cycles",
+        "unit_of_measurement": "cycles"
+      }
+    },
+    "select": {
+      "operating_state": {
+        "state": {
+          "run": "[%key:component::smartthings::entity::sensor::dishwasher_machine_state::state::run%]",
+          "pause": "[%key:common::state::paused%]",
+          "stop": "[%key:component::smartthings::entity::sensor::dishwasher_machine_state::state::stop%]"
+        }
+      }
+    },
     "sensor": {
       "lighting_mode": {
         "name": "Activity lighting mode"
@@ -275,6 +348,9 @@
       "refrigeration_setpoint": {
         "name": "[%key:component::smartthings::entity::sensor::oven_setpoint::name%]"
       },
+      "brightness_intensity": {
+        "name": "Brightness intensity"
+      },
       "robot_cleaner_cleaning_mode": {
         "name": "Cleaning mode",
         "state": {
@@ -389,6 +465,43 @@
           "freeze_protection": "Freeze protection"
         }
       }
+    },
+    "switch": {
+      "bubble_soak": {
+        "name": "Bubble Soak"
+      },
+      "wrinkle_prevent": {
+        "name": "Wrinkle prevent"
+      },
+      "ice_maker": {
+        "name": "Ice maker"
+      }
+    }
+  },
+  "issues": {
+    "deprecated_binary_valve": {
+      "title": "Deprecated valve binary sensor detected in some automations or scripts",
+      "description": "The valve binary sensor `{entity}` is deprecated and is used in the following automations or scripts:\n{items}\n\nA valve entity with controls is available and should be used going forward. Please use the new valve entity in the above automations or scripts to fix this issue."
+    },
+    "deprecated_binary_fridge_door": {
+      "title": "Deprecated refrigerator door binary sensor detected in some automations or scripts",
+      "description": "The refrigerator door binary sensor `{entity}` is deprecated and is used in the following automations or scripts:\n{items}\n\nSeparate entities for cooler and freezer door are available and should be used going forward. Please use them in the above automations or scripts to fix this issue."
+    },
+    "deprecated_machine_state": {
+      "title": "Deprecated machine state sensor detected in some automations or scripts",
+      "description": "The machine state sensor `{entity}` is deprecated and is used in the following automations or scripts:\n{items}\n\nA select entity is now available for the machine state and should be used going forward. Please use the new select entity in the above automations or scripts to fix this issue."
+    },
+    "deprecated_switch_appliance": {
+      "title": "Deprecated switch detected in some automations or scripts",
+      "description": "The switch `{entity}` is deprecated because the actions did not work, so it has been replaced with a binary sensor instead.\n\nThe switch was used in the following automations or scripts:\n{items}\n\nPlease use the new binary sensor in the above automations or scripts to fix this issue."
+    },
+    "deprecated_switch_media_player": {
+      "title": "[%key:component::smartthings::issues::deprecated_switch_appliance::title%]",
+      "description": "The switch `{entity}` is deprecated and a media player entity has been added to replace it.\n\nThe switch was used in the following automations or scripts:\n{items}\n\nPlease use the new media player entity in the above automations or scripts to fix this issue."
+ }, + "deprecated_media_player": { + "title": "Deprecated sensor detected in some automations or scripts", + "description": "The sensor `{entity}` is deprecated because it has been replaced with a media player entity.\n\nThe sensor was used in the following automations or scripts:\n{items}\n\nPlease use the new media player entity in the above automations or scripts to fix this issue." } } } diff --git a/homeassistant/components/smartthings/switch.py b/homeassistant/components/smartthings/switch.py index f470a90bb39..e5b74de3241 100644 --- a/homeassistant/components/smartthings/switch.py +++ b/homeassistant/components/smartthings/switch.py @@ -2,16 +2,25 @@ from __future__ import annotations +from dataclasses import dataclass from typing import Any -from pysmartthings import Attribute, Capability, Command +from pysmartthings import Attribute, Capability, Category, Command, SmartThings -from homeassistant.components.switch import SwitchEntity +from homeassistant.components.automation import automations_with_entity +from homeassistant.components.script import scripts_with_entity +from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback +from homeassistant.helpers.issue_registry import ( + IssueSeverity, + async_create_issue, + async_delete_issue, +) -from . import SmartThingsConfigEntry -from .const import MAIN +from . import FullDevice, SmartThingsConfigEntry +from .const import DOMAIN, MAIN from .entity import SmartThingsEntity CAPABILITIES = ( @@ -29,6 +38,52 @@ AC_CAPABILITIES = ( ) +@dataclass(frozen=True, kw_only=True) +class SmartThingsSwitchEntityDescription(SwitchEntityDescription): + """Describe a SmartThings switch entity.""" + + status_attribute: Attribute + component_translation_key: dict[str, str] | None = None + + +@dataclass(frozen=True, kw_only=True) +class SmartThingsCommandSwitchEntityDescription(SmartThingsSwitchEntityDescription): + """Describe a SmartThings switch entity.""" + + command: Command + + +SWITCH = SmartThingsSwitchEntityDescription( + key=Capability.SWITCH, + status_attribute=Attribute.SWITCH, + name=None, +) +CAPABILITY_TO_COMMAND_SWITCHES: dict[ + Capability | str, SmartThingsCommandSwitchEntityDescription +] = { + Capability.CUSTOM_DRYER_WRINKLE_PREVENT: SmartThingsCommandSwitchEntityDescription( + key=Capability.CUSTOM_DRYER_WRINKLE_PREVENT, + translation_key="wrinkle_prevent", + status_attribute=Attribute.DRYER_WRINKLE_PREVENT, + command=Command.SET_DRYER_WRINKLE_PREVENT, + ) +} +CAPABILITY_TO_SWITCHES: dict[Capability | str, SmartThingsSwitchEntityDescription] = { + Capability.SAMSUNG_CE_WASHER_BUBBLE_SOAK: SmartThingsSwitchEntityDescription( + key=Capability.SAMSUNG_CE_WASHER_BUBBLE_SOAK, + translation_key="bubble_soak", + status_attribute=Attribute.STATUS, + ), + Capability.SWITCH: SmartThingsSwitchEntityDescription( + key=Capability.SWITCH, + status_attribute=Attribute.SWITCH, + component_translation_key={ + "icemaker": "ice_maker", + }, + ), +} + + async def async_setup_entry( hass: HomeAssistant, entry: SmartThingsConfigEntry, @@ -36,37 +91,179 @@ async def async_setup_entry( ) -> None: """Add switches for a config entry.""" entry_data = entry.runtime_data - async_add_entities( - SmartThingsSwitch( - entry_data.client, device, entry_data.rooms, {Capability.SWITCH} - ) + entities: list[SmartThingsEntity] = [ + 
SmartThingsSwitch(entry_data.client, device, SWITCH, Capability.SWITCH) for device in entry_data.devices.values() if Capability.SWITCH in device.status[MAIN] and not any(capability in device.status[MAIN] for capability in CAPABILITIES) and not all(capability in device.status[MAIN] for capability in AC_CAPABILITIES) + ] + entities.extend( + SmartThingsCommandSwitch( + entry_data.client, + device, + description, + Capability(capability), + ) + for device in entry_data.devices.values() + for capability, description in CAPABILITY_TO_COMMAND_SWITCHES.items() + if capability in device.status[MAIN] ) + entities.extend( + SmartThingsSwitch( + entry_data.client, + device, + description, + Capability(capability), + component, + ) + for device in entry_data.devices.values() + for capability, description in CAPABILITY_TO_SWITCHES.items() + for component in device.status + if capability in device.status[component] + and ( + (description.component_translation_key is None and component == MAIN) + or ( + description.component_translation_key is not None + and component in description.component_translation_key + ) + ) + ) + async_add_entities(entities) class SmartThingsSwitch(SmartThingsEntity, SwitchEntity): """Define a SmartThings switch.""" - _attr_name = None + entity_description: SmartThingsSwitchEntityDescription + created_issue: bool = False + + def __init__( + self, + client: SmartThings, + device: FullDevice, + entity_description: SmartThingsSwitchEntityDescription, + capability: Capability, + component: str = MAIN, + ) -> None: + """Initialize the switch.""" + super().__init__(client, device, {capability}, component=component) + self.entity_description = entity_description + self.switch_capability = capability + self._attr_unique_id = f"{device.device.device_id}_{component}_{capability}_{entity_description.status_attribute}_{entity_description.status_attribute}" + if ( + translation_keys := entity_description.component_translation_key + ) is not None and ( + translation_key := translation_keys.get(component) + ) is not None: + self._attr_translation_key = translation_key async def async_turn_off(self, **kwargs: Any) -> None: """Turn the switch off.""" await self.execute_device_command( - Capability.SWITCH, + self.switch_capability, Command.OFF, ) async def async_turn_on(self, **kwargs: Any) -> None: """Turn the switch on.""" await self.execute_device_command( - Capability.SWITCH, + self.switch_capability, Command.ON, ) @property def is_on(self) -> bool: - """Return true if light is on.""" - return self.get_attribute_value(Capability.SWITCH, Attribute.SWITCH) == "on" + """Return true if switch is on.""" + return ( + self.get_attribute_value( + self.switch_capability, self.entity_description.status_attribute + ) + == "on" + ) + + async def async_added_to_hass(self) -> None: + """Call when entity is added to hass.""" + await super().async_added_to_hass() + media_player = all( + capability in self.device.status[MAIN] + for capability in ( + Capability.AUDIO_MUTE, + Capability.AUDIO_VOLUME, + Capability.MEDIA_PLAYBACK, + ) + ) + if ( + self.entity_description != SWITCH + and self.device.device.components[MAIN].manufacturer_category + not in { + Category.CLOTHING_CARE_MACHINE, + Category.COOKTOP, + Category.DRYER, + Category.WASHER, + Category.MICROWAVE, + Category.DISHWASHER, + } + ) or (self.entity_description != SWITCH and not media_player): + return + automations = automations_with_entity(self.hass, self.entity_id) + scripts = scripts_with_entity(self.hass, self.entity_id) + if not automations and 
not scripts: + return + + entity_reg: er.EntityRegistry = er.async_get(self.hass) + items_list = [ + f"- [{item.original_name}](/config/{integration}/edit/{item.unique_id})" + for integration, entities in ( + ("automation", automations), + ("script", scripts), + ) + for entity_id in entities + if (item := entity_reg.async_get(entity_id)) + ] + + identifier = "media_player" if media_player else "appliance" + + self.created_issue = True + async_create_issue( + self.hass, + DOMAIN, + f"deprecated_switch_{self.entity_id}", + breaks_in_ha_version="2025.10.0", + is_fixable=False, + severity=IssueSeverity.WARNING, + translation_key=f"deprecated_switch_{identifier}", + translation_placeholders={ + "entity": self.entity_id, + "items": "\n".join(items_list), + }, + ) + + async def async_will_remove_from_hass(self) -> None: + """Call when entity will be removed from hass.""" + await super().async_will_remove_from_hass() + if not self.created_issue: + return + async_delete_issue(self.hass, DOMAIN, f"deprecated_switch_{self.entity_id}") + + +class SmartThingsCommandSwitch(SmartThingsSwitch): + """Define a SmartThings command switch.""" + + entity_description: SmartThingsCommandSwitchEntityDescription + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the switch off.""" + await self.execute_device_command( + self.switch_capability, + self.entity_description.command, + "off", + ) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the switch on.""" + await self.execute_device_command( + self.switch_capability, + self.entity_description.command, + "on", + ) diff --git a/homeassistant/components/smartthings/update.py b/homeassistant/components/smartthings/update.py new file mode 100644 index 00000000000..bb226918596 --- /dev/null +++ b/homeassistant/components/smartthings/update.py @@ -0,0 +1,87 @@ +"""Support for update entities through the SmartThings cloud API.""" + +from __future__ import annotations + +from typing import Any + +from awesomeversion import AwesomeVersion +from pysmartthings import Attribute, Capability, Command + +from homeassistant.components.update import ( + UpdateDeviceClass, + UpdateEntity, + UpdateEntityFeature, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . 
import SmartThingsConfigEntry
+from .const import MAIN
+from .entity import SmartThingsEntity
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    entry: SmartThingsConfigEntry,
+    async_add_entities: AddConfigEntryEntitiesCallback,
+) -> None:
+    """Add update entities for a config entry."""
+    entry_data = entry.runtime_data
+    async_add_entities(
+        SmartThingsUpdateEntity(entry_data.client, device, {Capability.FIRMWARE_UPDATE})
+        for device in entry_data.devices.values()
+        if Capability.FIRMWARE_UPDATE in device.status[MAIN]
+    )
+
+
+def is_hex_version(version: str) -> bool:
+    """Check if the version is a hex version."""
+    return len(version) == 8 and all(c in "0123456789abcdefABCDEF" for c in version)
+
+
+class SmartThingsUpdateEntity(SmartThingsEntity, UpdateEntity):
+    """Define a SmartThings update entity."""
+
+    _attr_device_class = UpdateDeviceClass.FIRMWARE
+    _attr_supported_features = (
+        UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS
+    )
+
+    @property
+    def installed_version(self) -> str | None:
+        """Return the installed version of the entity."""
+        return self.get_attribute_value(
+            Capability.FIRMWARE_UPDATE, Attribute.CURRENT_VERSION
+        )
+
+    @property
+    def latest_version(self) -> str | None:
+        """Return the available version of the entity."""
+        return self.get_attribute_value(
+            Capability.FIRMWARE_UPDATE, Attribute.AVAILABLE_VERSION
+        )
+
+    @property
+    def in_progress(self) -> bool:
+        """Return if the entity is in progress."""
+        return (
+            self.get_attribute_value(Capability.FIRMWARE_UPDATE, Attribute.STATE)
+            == "updateInProgress"
+        )
+
+    async def async_install(
+        self, version: str | None, backup: bool, **kwargs: Any
+    ) -> None:
+        """Install the firmware update."""
+        await self.execute_device_command(
+            Capability.FIRMWARE_UPDATE,
+            Command.UPDATE_FIRMWARE,
+        )
+
+    def version_is_newer(self, latest_version: str, installed_version: str) -> bool:
+        """Return if the latest version is newer."""
+        if is_hex_version(latest_version):
+            latest_version = f"0x{latest_version}"
+        if is_hex_version(installed_version):
+            installed_version = f"0x{installed_version}"
+        return AwesomeVersion(latest_version) > AwesomeVersion(installed_version)
diff --git a/homeassistant/components/smartthings/valve.py b/homeassistant/components/smartthings/valve.py
new file mode 100644
index 00000000000..4279d528f8b
--- /dev/null
+++ b/homeassistant/components/smartthings/valve.py
@@ -0,0 +1,71 @@
+"""Support for valves through the SmartThings cloud API."""
+
+from __future__ import annotations
+
+from pysmartthings import Attribute, Capability, Category, Command, SmartThings
+
+from homeassistant.components.valve import (
+    ValveDeviceClass,
+    ValveEntity,
+    ValveEntityFeature,
+)
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
+
+from . import FullDevice, SmartThingsConfigEntry
+from .const import MAIN
+from .entity import SmartThingsEntity
+
+DEVICE_CLASS_MAP: dict[Category | str, ValveDeviceClass] = {
+    Category.WATER_VALVE: ValveDeviceClass.WATER,
+    Category.GAS_VALVE: ValveDeviceClass.GAS,
+}
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    entry: SmartThingsConfigEntry,
+    async_add_entities: AddConfigEntryEntitiesCallback,
+) -> None:
+    """Add valves for a config entry."""
+    entry_data = entry.runtime_data
+    async_add_entities(
+        SmartThingsValve(entry_data.client, device)
+        for device in entry_data.devices.values()
+        if Capability.VALVE in device.status[MAIN]
+    )
+
+
+class SmartThingsValve(SmartThingsEntity, ValveEntity):
+    """Define a SmartThings valve."""
+
+    _attr_supported_features = ValveEntityFeature.OPEN | ValveEntityFeature.CLOSE
+    _attr_reports_position = False
+    _attr_name = None
+
+    def __init__(self, client: SmartThings, device: FullDevice) -> None:
+        """Init the class."""
+        super().__init__(client, device, {Capability.VALVE})
+        self._attr_device_class = DEVICE_CLASS_MAP.get(
+            device.device.components[MAIN].user_category
+            or device.device.components[MAIN].manufacturer_category
+        )
+
+    async def async_open_valve(self) -> None:
+        """Open the valve."""
+        await self.execute_device_command(
+            Capability.VALVE,
+            Command.OPEN,
+        )
+
+    async def async_close_valve(self) -> None:
+        """Close the valve."""
+        await self.execute_device_command(
+            Capability.VALVE,
+            Command.CLOSE,
+        )
+
+    @property
+    def is_closed(self) -> bool:
+        """Return if the valve is closed."""
+        return self.get_attribute_value(Capability.VALVE, Attribute.VALVE) == "closed"
diff --git a/homeassistant/components/smarttub/strings.json b/homeassistant/components/smarttub/strings.json
index 974e5fb7d37..79fa7a4820f 100644
--- a/homeassistant/components/smarttub/strings.json
+++ b/homeassistant/components/smarttub/strings.json
@@ -49,17 +49,17 @@
     },
     "snooze_reminder": {
       "name": "Snooze a reminder",
-      "description": "Delay a reminder, so that it won't trigger again for a period of time.",
+      "description": "Temporarily suppresses the maintenance reminder on a hot tub.",
       "fields": {
         "days": {
           "name": "Days",
-          "description": "The number of days to delay the reminder."
+          "description": "The number of days to snooze the reminder."
} } }, "reset_reminder": { "name": "Reset a reminder", - "description": "Reset a reminder, and set the next time it will be triggered.", + "description": "Resets the maintenance reminder on a hot tub.", "fields": { "days": { "name": "[%key:component::smarttub::services::snooze_reminder::fields::days::name%]", diff --git a/homeassistant/components/smarty/config_flow.py b/homeassistant/components/smarty/config_flow.py index 9a55356a990..a7f0bdd4123 100644 --- a/homeassistant/components/smarty/config_flow.py +++ b/homeassistant/components/smarty/config_flow.py @@ -1,5 +1,6 @@ """Config flow for Smarty integration.""" +import logging from typing import Any from pysmarty2 import Smarty @@ -10,6 +11,8 @@ from homeassistant.const import CONF_HOST, CONF_NAME from .const import DOMAIN +_LOGGER = logging.getLogger(__name__) + class SmartyConfigFlow(ConfigFlow, domain=DOMAIN): """Smarty config flow.""" @@ -20,7 +23,8 @@ class SmartyConfigFlow(ConfigFlow, domain=DOMAIN): try: if smarty.update(): return None - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") return "unknown" else: return "cannot_connect" diff --git a/homeassistant/components/smlight/__init__.py b/homeassistant/components/smlight/__init__.py index 8f3e675ef6b..b3a6860e5b7 100644 --- a/homeassistant/components/smlight/__init__.py +++ b/homeassistant/components/smlight/__init__.py @@ -2,7 +2,7 @@ from __future__ import annotations -from pysmlight import Api2, Info, Radio +from pysmlight import Api2 from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant @@ -50,9 +50,3 @@ async def async_setup_entry(hass: HomeAssistant, entry: SmConfigEntry) -> bool: async def async_unload_entry(hass: HomeAssistant, entry: SmConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - - -def get_radio(info: Info, idx: int) -> Radio: - """Get the radio object from the info.""" - assert info.radios is not None - return info.radios[idx] diff --git a/homeassistant/components/smlight/config_flow.py b/homeassistant/components/smlight/config_flow.py index fcfc364d983..ce4f8f43233 100644 --- a/homeassistant/components/smlight/config_flow.py +++ b/homeassistant/components/smlight/config_flow.py @@ -51,14 +51,14 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN): self.client = Api2(self._host, session=async_get_clientsession(self.hass)) try: - info = await self.client.get_info() - self._host = str(info.device_ip) - self._device_name = str(info.hostname) - - if info.model not in Devices: - return self.async_abort(reason="unsupported_device") - if not await self._async_check_auth_required(user_input): + info = await self.client.get_info() + self._host = str(info.device_ip) + self._device_name = str(info.hostname) + + if info.model not in Devices: + return self.async_abort(reason="unsupported_device") + return await self._async_complete_entry(user_input) except SmlightConnectionError: errors["base"] = "cannot_connect" @@ -128,13 +128,13 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN): if user_input is not None: try: - info = await self.client.get_info() - - if info.model not in Devices: - return self.async_abort(reason="unsupported_device") - if not await self._async_check_auth_required(user_input): - return await self._async_complete_entry(user_input) + info = await self.client.get_info() + + if info.model not in Devices: + return self.async_abort(reason="unsupported_device") + + return await 
self._async_complete_entry(user_input) except SmlightConnectionError: return self.async_abort(reason="cannot_connect") diff --git a/homeassistant/components/smlight/manifest.json b/homeassistant/components/smlight/manifest.json index 3f527d1fcd9..e9025203b8c 100644 --- a/homeassistant/components/smlight/manifest.json +++ b/homeassistant/components/smlight/manifest.json @@ -11,7 +11,7 @@ "documentation": "https://www.home-assistant.io/integrations/smlight", "integration_type": "device", "iot_class": "local_push", - "requirements": ["pysmlight==0.2.3"], + "requirements": ["pysmlight==0.2.4"], "zeroconf": [ { "type": "_slzb-06._tcp.local." diff --git a/homeassistant/components/smlight/sensor.py b/homeassistant/components/smlight/sensor.py index 57a08d177d4..2f57843b5eb 100644 --- a/homeassistant/components/smlight/sensor.py +++ b/homeassistant/components/smlight/sensor.py @@ -37,7 +37,7 @@ class SmSensorEntityDescription(SensorEntityDescription): class SmInfoEntityDescription(SensorEntityDescription): """Class describing SMLIGHT information entities.""" - value_fn: Callable[[Info], StateType] + value_fn: Callable[[Info, int], StateType] INFO: list[SmInfoEntityDescription] = [ @@ -46,24 +46,25 @@ INFO: list[SmInfoEntityDescription] = [ translation_key="device_mode", device_class=SensorDeviceClass.ENUM, options=["eth", "wifi", "usb"], - value_fn=lambda x: x.coord_mode, + value_fn=lambda x, idx: x.coord_mode, ), SmInfoEntityDescription( key="firmware_channel", translation_key="firmware_channel", device_class=SensorDeviceClass.ENUM, options=["dev", "release"], - value_fn=lambda x: x.fw_channel, - ), - SmInfoEntityDescription( - key="zigbee_type", - translation_key="zigbee_type", - device_class=SensorDeviceClass.ENUM, - options=["coordinator", "router", "thread"], - value_fn=lambda x: x.zb_type, + value_fn=lambda x, idx: x.fw_channel, ), ] +RADIO_INFO = SmInfoEntityDescription( + key="zigbee_type", + translation_key="zigbee_type", + device_class=SensorDeviceClass.ENUM, + options=["coordinator", "router", "thread"], + value_fn=lambda x, idx: x.radios[idx].zb_type, +) + SENSORS: list[SmSensorEntityDescription] = [ SmSensorEntityDescription( @@ -102,6 +103,16 @@ SENSORS: list[SmSensorEntityDescription] = [ ), ] +EXTRA_SENSOR = SmSensorEntityDescription( + key="zigbee_temperature_2", + translation_key="zigbee_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, + value_fn=lambda x: x.zb_temp2, +) + UPTIME: list[SmSensorEntityDescription] = [ SmSensorEntityDescription( key="core_uptime", @@ -127,8 +138,7 @@ async def async_setup_entry( ) -> None: """Set up SMLIGHT sensor based on a config entry.""" coordinator = entry.runtime_data.data - - async_add_entities( + entities: list[SmEntity] = list( chain( (SmInfoSensorEntity(coordinator, description) for description in INFO), (SmSensorEntity(coordinator, description) for description in SENSORS), @@ -136,6 +146,16 @@ async def async_setup_entry( ) ) + entities.extend( + SmInfoSensorEntity(coordinator, RADIO_INFO, idx) + for idx, _ in enumerate(coordinator.data.info.radios) + ) + + if coordinator.data.sensors.zb_temp2 is not None: + entities.append(SmSensorEntity(coordinator, EXTRA_SENSOR)) + + async_add_entities(entities) + class SmSensorEntity(SmEntity, SensorEntity): """Representation of a slzb sensor.""" @@ -172,17 +192,20 @@ class SmInfoSensorEntity(SmEntity, SensorEntity): self, coordinator: SmDataUpdateCoordinator, 
description: SmInfoEntityDescription, + idx: int = 0, ) -> None: """Initiate slzb sensor.""" super().__init__(coordinator) self.entity_description = description - self._attr_unique_id = f"{coordinator.unique_id}_{description.key}" + self.idx = idx + sensor = f"_{idx}" if idx else "" + self._attr_unique_id = f"{coordinator.unique_id}_{description.key}{sensor}" @property def native_value(self) -> StateType: """Return the sensor value.""" - value = self.entity_description.value_fn(self.coordinator.data.info) + value = self.entity_description.value_fn(self.coordinator.data.info, self.idx) options = self.entity_description.options if isinstance(value, int) and options is not None: diff --git a/homeassistant/components/smlight/update.py b/homeassistant/components/smlight/update.py index 10d142e6221..3143f2f4290 100644 --- a/homeassistant/components/smlight/update.py +++ b/homeassistant/components/smlight/update.py @@ -22,7 +22,6 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback -from . import get_radio from .const import LOGGER from .coordinator import SmConfigEntry, SmFirmwareUpdateCoordinator, SmFwData from .entity import SmEntity @@ -56,7 +55,7 @@ CORE_UPDATE_ENTITY = SmUpdateEntityDescription( ZB_UPDATE_ENTITY = SmUpdateEntityDescription( key="zigbee_update", translation_key="zigbee_update", - installed_version=lambda x, idx: get_radio(x, idx).zb_version, + installed_version=lambda x, idx: x.radios[idx].zb_version, latest_version=zigbee_latest_version, ) @@ -75,7 +74,6 @@ async def async_setup_entry( entities = [SmUpdateEntity(coordinator, CORE_UPDATE_ENTITY)] radios = coordinator.data.info.radios - assert radios is not None entities.extend( SmUpdateEntity(coordinator, ZB_UPDATE_ENTITY, idx) diff --git a/homeassistant/components/snoo/__init__.py b/homeassistant/components/snoo/__init__.py index aaf0c828830..54834bf58ce 100644 --- a/homeassistant/components/snoo/__init__.py +++ b/homeassistant/components/snoo/__init__.py @@ -17,7 +17,13 @@ from .coordinator import SnooConfigEntry, SnooCoordinator _LOGGER = logging.getLogger(__name__) -PLATFORMS: list[Platform] = [Platform.SENSOR] +PLATFORMS: list[Platform] = [ + Platform.BINARY_SENSOR, + Platform.EVENT, + Platform.SELECT, + Platform.SENSOR, + Platform.SWITCH, +] async def async_setup_entry(hass: HomeAssistant, entry: SnooConfigEntry) -> bool: diff --git a/homeassistant/components/snoo/binary_sensor.py b/homeassistant/components/snoo/binary_sensor.py new file mode 100644 index 00000000000..3c91db5b86d --- /dev/null +++ b/homeassistant/components/snoo/binary_sensor.py @@ -0,0 +1,70 @@ +"""Support for Snoo Binary Sensors.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass + +from python_snoo.containers import SnooData + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, + EntityCategory, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from .coordinator import SnooConfigEntry +from .entity import SnooDescriptionEntity + + +@dataclass(frozen=True, kw_only=True) +class SnooBinarySensorEntityDescription(BinarySensorEntityDescription): + """Describes a Snoo Binary Sensor.""" + + value_fn: Callable[[SnooData], bool] + + +BINARY_SENSOR_DESCRIPTIONS: 
list[SnooBinarySensorEntityDescription] = [ + SnooBinarySensorEntityDescription( + key="left_clip", + translation_key="left_clip", + value_fn=lambda data: data.left_safety_clip, + device_class=BinarySensorDeviceClass.CONNECTIVITY, + entity_category=EntityCategory.DIAGNOSTIC, + ), + SnooBinarySensorEntityDescription( + key="right_clip", + translation_key="right_clip", + value_fn=lambda data: data.right_safety_clip, + device_class=BinarySensorDeviceClass.CONNECTIVITY, + entity_category=EntityCategory.DIAGNOSTIC, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SnooConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up Snoo device.""" + coordinators = entry.runtime_data + async_add_entities( + SnooBinarySensor(coordinator, description) + for coordinator in coordinators.values() + for description in BINARY_SENSOR_DESCRIPTIONS + ) + + +class SnooBinarySensor(SnooDescriptionEntity, BinarySensorEntity): + """A binary sensor using the Snoo coordinator.""" + + entity_description: SnooBinarySensorEntityDescription + + @property + def is_on(self) -> bool: + """Return true if the binary sensor is on.""" + return self.entity_description.value_fn(self.coordinator.data)
diff --git a/homeassistant/components/snoo/event.py b/homeassistant/components/snoo/event.py new file mode 100644 index 00000000000..5932bfd9862 --- /dev/null +++ b/homeassistant/components/snoo/event.py @@ -0,0 +1,62 @@ +"""Support for Snoo Events.""" + +from homeassistant.components.event import EventEntity, EventEntityDescription +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from .coordinator import SnooConfigEntry +from .entity import SnooDescriptionEntity + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SnooConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up Snoo device.""" + coordinators = entry.runtime_data + async_add_entities( + SnooEvent( + coordinator, + EventEntityDescription( + key="event", + translation_key="event", + event_types=[ + "timer", + "cry", + "command", + "safety_clip", + "long_activity_press", + "activity", + "power", + "status_requested", + "sticky_white_noise_updated", + ], + ), + ) + for coordinator in coordinators.values() + ) + + +class SnooEvent(SnooDescriptionEntity, EventEntity): + """An event using the Snoo coordinator.""" + + @callback + def _async_handle_event(self) -> None: + """Handle the Snoo event.""" + self._trigger_event( + self.coordinator.data.event.value, + ) + self.async_write_ha_state() + + async def async_added_to_hass(self) -> None: + """Add Event.""" + await super().async_added_to_hass() + if self.coordinator.data: + # If we were able to get data on startup - set it + # Otherwise, it will update when the coordinator gets data.
+ self._async_handle_event() + + def _handle_coordinator_update(self) -> None: + self._async_handle_event() + return super()._handle_coordinator_update() diff --git a/homeassistant/components/snoo/manifest.json b/homeassistant/components/snoo/manifest.json index c9306e58413..4084a7e3e79 100644 --- a/homeassistant/components/snoo/manifest.json +++ b/homeassistant/components/snoo/manifest.json @@ -7,5 +7,5 @@ "iot_class": "cloud_push", "loggers": ["snoo"], "quality_scale": "bronze", - "requirements": ["python-snoo==0.6.1"] + "requirements": ["python-snoo==0.6.4"] } diff --git a/homeassistant/components/snoo/select.py b/homeassistant/components/snoo/select.py new file mode 100644 index 00000000000..44624ed1a2d --- /dev/null +++ b/homeassistant/components/snoo/select.py @@ -0,0 +1,78 @@ +"""Support for Snoo Select.""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass + +from python_snoo.containers import SnooData, SnooDevice, SnooLevels +from python_snoo.exceptions import SnooCommandException +from python_snoo.snoo import Snoo + +from homeassistant.components.select import SelectEntity, SelectEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from .const import DOMAIN +from .coordinator import SnooConfigEntry +from .entity import SnooDescriptionEntity + + +@dataclass(frozen=True, kw_only=True) +class SnooSelectEntityDescription(SelectEntityDescription): + """Describes a Snoo Select.""" + + value_fn: Callable[[SnooData], str] + set_value_fn: Callable[[Snoo, SnooDevice, str], Awaitable[None]] + + +SELECT_DESCRIPTIONS: list[SnooSelectEntityDescription] = [ + SnooSelectEntityDescription( + key="intensity", + translation_key="intensity", + value_fn=lambda data: data.state_machine.level.name, + set_value_fn=lambda snoo_api, device, state: snoo_api.set_level( + device, SnooLevels[state] + ), + options=[level.name for level in SnooLevels], + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SnooConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up Snoo device.""" + coordinators = entry.runtime_data + async_add_entities( + SnooSelect(coordinator, description) + for coordinator in coordinators.values() + for description in SELECT_DESCRIPTIONS + ) + + +class SnooSelect(SnooDescriptionEntity, SelectEntity): + """A sensor using Snoo coordinator.""" + + entity_description: SnooSelectEntityDescription + + @property + def current_option(self) -> str | None: + """Return the selected entity option to represent the entity state.""" + return self.entity_description.value_fn(self.coordinator.data) + + async def async_select_option(self, option: str) -> None: + """Change the selected option.""" + try: + await self.entity_description.set_value_fn( + self.coordinator.snoo, self.device, option + ) + except SnooCommandException as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="select_failed", + translation_placeholders={"name": str(self.name), "option": option}, + ) from err diff --git a/homeassistant/components/snoo/strings.json b/homeassistant/components/snoo/strings.json index 567fa30fca7..f7cf6a4820b 100644 --- a/homeassistant/components/snoo/strings.json +++ b/homeassistant/components/snoo/strings.json @@ -21,7 +21,46 @@ "already_configured": 
"[%key:common::config_flow::abort::already_configured_device%]" } }, + "exceptions": { + "select_failed": { + "message": "Error while updating {name} to {option}" + }, + "switch_on_failed": { + "message": "Turning {name} on failed" + }, + "switch_off_failed": { + "message": "Turning {name} off failed" + } + }, "entity": { + "binary_sensor": { + "left_clip": { + "name": "Left safety clip" + }, + "right_clip": { + "name": "Right safety clip" + } + }, + "event": { + "event": { + "name": "Snoo event", + "state_attributes": { + "event_type": { + "state": { + "timer": "Timer", + "cry": "Cry", + "command": "Command sent", + "safety_clip": "Safety clip changed", + "long_activity_press": "Long activity press", + "activity": "Activity press", + "power": "Power button pressed", + "status_requested": "Status requested", + "sticky_white_noise_updated": "Sleepytime sounds updated" + } + } + } + } + }, "sensor": { "state": { "name": "State", @@ -39,6 +78,27 @@ "time_left": { "name": "Time left" } + }, + "select": { + "intensity": { + "name": "Intensity", + "state": { + "baseline": "[%key:component::snoo::entity::sensor::state::state::baseline%]", + "level1": "[%key:component::snoo::entity::sensor::state::state::level1%]", + "level2": "[%key:component::snoo::entity::sensor::state::state::level2%]", + "level3": "[%key:component::snoo::entity::sensor::state::state::level3%]", + "level4": "[%key:component::snoo::entity::sensor::state::state::level4%]", + "stop": "[%key:component::snoo::entity::sensor::state::state::stop%]" + } + } + }, + "switch": { + "sticky_white_noise": { + "name": "Sleepytime sounds" + }, + "hold": { + "name": "Level lock" + } } } } diff --git a/homeassistant/components/snoo/switch.py b/homeassistant/components/snoo/switch.py new file mode 100644 index 00000000000..2ed322d5f6b --- /dev/null +++ b/homeassistant/components/snoo/switch.py @@ -0,0 +1,105 @@ +"""Support for Snoo Switches.""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass +from typing import Any + +from python_snoo.containers import SnooData, SnooDevice +from python_snoo.exceptions import SnooCommandException +from python_snoo.snoo import Snoo + +from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from .const import DOMAIN +from .coordinator import SnooConfigEntry +from .entity import SnooDescriptionEntity + + +@dataclass(frozen=True, kw_only=True) +class SnooSwitchEntityDescription(SwitchEntityDescription): + """Describes a Snoo sensor.""" + + value_fn: Callable[[SnooData], bool] + set_value_fn: Callable[[Snoo, SnooDevice, SnooData, bool], Awaitable[None]] + + +BINARY_SENSOR_DESCRIPTIONS: list[SnooSwitchEntityDescription] = [ + SnooSwitchEntityDescription( + key="sticky_white_noise", + translation_key="sticky_white_noise", + value_fn=lambda data: data.state_machine.sticky_white_noise == "on", + set_value_fn=lambda snoo_api, device, _, state: snoo_api.set_sticky_white_noise( + device, state + ), + ), + SnooSwitchEntityDescription( + key="hold", + translation_key="hold", + value_fn=lambda data: data.state_machine.hold == "on", + set_value_fn=lambda snoo_api, device, data, state: snoo_api.set_level( + device, data.state_machine.level, state + ), + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SnooConfigEntry, + 
async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up Snoo device.""" + coordinators = entry.runtime_data + async_add_entities( + SnooSwitch(coordinator, description) + for coordinator in coordinators.values() + for description in SWITCH_DESCRIPTIONS + ) + + +class SnooSwitch(SnooDescriptionEntity, SwitchEntity): + """A switch using Snoo coordinator.""" + + entity_description: SnooSwitchEntityDescription + + @property + def is_on(self) -> bool | None: + """Return True if entity is on.""" + return self.entity_description.value_fn(self.coordinator.data) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the entity on.""" + try: + await self.entity_description.set_value_fn( + self.coordinator.snoo, + self.coordinator.device, + self.coordinator.data, + True, + ) + except SnooCommandException as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="switch_on_failed", + translation_placeholders={"name": str(self.name), "status": "on"}, + ) from err + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the entity off.""" + try: + await self.entity_description.set_value_fn( + self.coordinator.snoo, + self.coordinator.device, + self.coordinator.data, + False, + ) + except SnooCommandException as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="switch_off_failed", + translation_placeholders={"name": str(self.name), "status": "off"}, + ) from err
diff --git a/homeassistant/components/solarlog/coordinator.py b/homeassistant/components/solarlog/coordinator.py index 6292b1332d7..48ebeece1ba 100644 --- a/homeassistant/components/solarlog/coordinator.py +++ b/homeassistant/components/solarlog/coordinator.py @@ -75,7 +75,7 @@ class SolarLogCoordinator(DataUpdateCoordinator[SolarlogData]): await self.solarlog.test_extended_data_available() if logged_in or await self.solarlog.test_extended_data_available(): device_list = await self.solarlog.update_device_list() - self.solarlog.set_enabled_devices({key: True for key in device_list}) + self.solarlog.set_enabled_devices(dict.fromkeys(device_list, True)) async def _async_update_data(self) -> SolarlogData: """Update the data from the SolarLog device."""
diff --git a/homeassistant/components/sonos/__init__.py b/homeassistant/components/sonos/__init__.py index d530fa21e39..24580971ae2 100644 --- a/homeassistant/components/sonos/__init__.py +++ b/homeassistant/components/sonos/__init__.py @@ -7,6 +7,7 @@ from collections import OrderedDict from dataclasses import dataclass, field import datetime from functools import partial +from ipaddress import AddressValueError, IPv4Address import logging import socket from typing import Any, cast @@ -208,6 +209,14 @@ class SonosDiscoveryManager: async def async_subscribe_to_zone_updates(self, ip_address: str) -> None: """Test subscriptions and create SonosSpeakers based on results.""" + try: + _ = IPv4Address(ip_address) + except AddressValueError: + _LOGGER.debug( + "Sonos integration only supports IPv4 addresses, invalid ip_address received: %s", + ip_address, + ) + return soco = SoCo(ip_address) # Cache now to avoid household ID lookup during first ZoneGroupState processing await self.hass.async_add_executor_job( diff --git a/homeassistant/components/sonos/config_flow.py b/homeassistant/components/sonos/config_flow.py index 057cdb8ec08..b5e2c684281 100644 --- a/homeassistant/components/sonos/config_flow.py +++ b/homeassistant/components/sonos/config_flow.py @@ -31,6 +31,8 @@ class
SonosDiscoveryFlowHandler(DiscoveryFlowHandler[Awaitable[bool]], domain=DO hostname = discovery_info.hostname if hostname is None or not hostname.lower().startswith("sonos"): return self.async_abort(reason="not_sonos_device") + if discovery_info.ip_address.version != 4: + return self.async_abort(reason="not_ipv4_address") if discovery_manager := self.hass.data.get(DATA_SONOS_DISCOVERY_MANAGER): host = discovery_info.host mdns_name = discovery_info.name diff --git a/homeassistant/components/sonos/const.py b/homeassistant/components/sonos/const.py index 8fb704cbfbc..cda40729dbc 100644 --- a/homeassistant/components/sonos/const.py +++ b/homeassistant/components/sonos/const.py @@ -32,6 +32,7 @@ SONOS_TRACKS = "tracks" SONOS_COMPOSER = "composers" SONOS_RADIO = "radio" SONOS_OTHER_ITEM = "other items" +SONOS_AUDIO_BOOK = "audio book" SONOS_STATE_PLAYING = "PLAYING" SONOS_STATE_TRANSITIONING = "TRANSITIONING" @@ -67,6 +68,7 @@ SONOS_TO_MEDIA_CLASSES = { "object.item": MediaClass.TRACK, "object.item.audioItem.musicTrack": MediaClass.TRACK, "object.item.audioItem.audioBroadcast": MediaClass.GENRE, + "object.item.audioItem.audioBook": MediaClass.TRACK, } SONOS_TO_MEDIA_TYPES = { @@ -84,6 +86,7 @@ SONOS_TO_MEDIA_TYPES = { "object.container.playlistContainer.sameArtist": MediaType.ARTIST, "object.container.playlistContainer": MediaType.PLAYLIST, "object.item.audioItem.musicTrack": MediaType.TRACK, + "object.item.audioItem.audioBook": MediaType.TRACK, } MEDIA_TYPES_TO_SONOS: dict[MediaType | str, str] = { @@ -113,6 +116,7 @@ SONOS_TYPES_MAPPING = { "object.item": SONOS_OTHER_ITEM, "object.item.audioItem.musicTrack": SONOS_TRACKS, "object.item.audioItem.audioBroadcast": SONOS_RADIO, + "object.item.audioItem.audioBook": SONOS_AUDIO_BOOK, } LIBRARY_TITLES_MAPPING = { diff --git a/homeassistant/components/sonos/favorites.py b/homeassistant/components/sonos/favorites.py index 5050555a7cb..333c4809e62 100644 --- a/homeassistant/components/sonos/favorites.py +++ b/homeassistant/components/sonos/favorites.py @@ -105,7 +105,7 @@ class SonosFavorites(SonosHouseholdCoordinator): @soco_error() def update_cache(self, soco: SoCo, update_id: int | None = None) -> bool: """Update cache of known favorites and return if cache has changed.""" - new_favorites = soco.music_library.get_sonos_favorites() + new_favorites = soco.music_library.get_sonos_favorites(full_album_art_uri=True) # Polled update_id values do not match event_id values # Each speaker can return a different polled update_id diff --git a/homeassistant/components/sonos/media_browser.py b/homeassistant/components/sonos/media_browser.py index 995d6cea08c..16b425dae50 100644 --- a/homeassistant/components/sonos/media_browser.py +++ b/homeassistant/components/sonos/media_browser.py @@ -165,6 +165,8 @@ async def async_browse_media( favorites_folder_payload, speaker.favorites, media_content_id, + media, + get_browse_image_url, ) payload = { @@ -443,7 +445,10 @@ def favorites_payload(favorites: SonosFavorites) -> BrowseMedia: def favorites_folder_payload( - favorites: SonosFavorites, media_content_id: str + favorites: SonosFavorites, + media_content_id: str, + media: SonosMedia, + get_browse_image_url: GetBrowseImageUrlType, ) -> BrowseMedia: """Create response payload to describe all items of a type of favorite. 
@@ -463,7 +468,14 @@ def favorites_folder_payload( media_content_type="favorite_item_id", can_play=True, can_expand=False, - thumbnail=getattr(favorite, "album_art_uri", None), + thumbnail=get_thumbnail_url_full( + media=media, + is_internal=True, + media_content_type="favorite_item_id", + media_content_id=favorite.item_id, + get_browse_image_url=get_browse_image_url, + item=favorite, + ), ) ) diff --git a/homeassistant/components/sonos/media_player.py b/homeassistant/components/sonos/media_player.py index 0c66484202f..a774de0ae5b 100644 --- a/homeassistant/components/sonos/media_player.py +++ b/homeassistant/components/sonos/media_player.py @@ -462,11 +462,20 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity): """Play a favorite.""" uri = favorite.reference.get_uri() soco = self.coordinator.soco - if soco.music_source_from_uri(uri) in [ - MUSIC_SRC_RADIO, - MUSIC_SRC_LINE_IN, - ]: - soco.play_uri(uri, title=favorite.title, timeout=LONG_SERVICE_TIMEOUT) + if ( + soco.music_source_from_uri(uri) + in [ + MUSIC_SRC_RADIO, + MUSIC_SRC_LINE_IN, + ] + or favorite.reference.item_class == "object.item.audioItem.audioBook" + ): + soco.play_uri( + uri, + title=favorite.title, + meta=favorite.resource_meta_data, + timeout=LONG_SERVICE_TIMEOUT, + ) else: soco.clear_queue() soco.add_to_queue(favorite.reference, timeout=LONG_SERVICE_TIMEOUT) diff --git a/homeassistant/components/sonos/strings.json b/homeassistant/components/sonos/strings.json index 07d2e2db4e0..433bb3cc36a 100644 --- a/homeassistant/components/sonos/strings.json +++ b/homeassistant/components/sonos/strings.json @@ -8,7 +8,8 @@ "abort": { "not_sonos_device": "Discovered device is not a Sonos device", "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", - "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]" + "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]", + "not_ipv4_address": "No IPv4 address in SSDP discovery information" } }, "issues": { diff --git a/homeassistant/components/spotify/config_flow.py b/homeassistant/components/spotify/config_flow.py index d99fa7793df..3478887d64c 100644 --- a/homeassistant/components/spotify/config_flow.py +++ b/homeassistant/components/spotify/config_flow.py @@ -41,7 +41,8 @@ class SpotifyFlowHandler( try: current_user = await spotify.get_current_user() - except Exception: # noqa: BLE001 + except Exception: + self.logger.exception("Error while connecting to Spotify") return self.async_abort(reason="connection_error") name = current_user.display_name diff --git a/homeassistant/components/spotify/strings.json b/homeassistant/components/spotify/strings.json index 90e573a1706..66d837c503f 100644 --- a/homeassistant/components/spotify/strings.json +++ b/homeassistant/components/spotify/strings.json @@ -13,7 +13,7 @@ "authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]", "missing_configuration": "The Spotify integration is not configured. 
Please follow the documentation.", "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]", - "reauth_account_mismatch": "The Spotify account authenticated with, does not match the account needed re-authentication.", + "reauth_account_mismatch": "The Spotify account authenticated with does not match the account that needed re-authentication.", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "oauth_error": "[%key:common::config_flow::abort::oauth2_error%]", "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", diff --git a/homeassistant/components/sql/manifest.json b/homeassistant/components/sql/manifest.json index 2b00a5b0d65..37b5dc2b647 100644 --- a/homeassistant/components/sql/manifest.json +++ b/homeassistant/components/sql/manifest.json @@ -6,5 +6,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/sql", "iot_class": "local_polling", - "requirements": ["SQLAlchemy==2.0.38", "sqlparse==0.5.0"] + "requirements": ["SQLAlchemy==2.0.39", "sqlparse==0.5.0"] } diff --git a/homeassistant/components/squeezebox/__init__.py b/homeassistant/components/squeezebox/__init__.py index fd641d3389d..78a97e38833 100644 --- a/homeassistant/components/squeezebox/__init__.py +++ b/homeassistant/components/squeezebox/__init__.py @@ -53,6 +53,7 @@ _LOGGER = logging.getLogger(__name__) PLATFORMS = [ Platform.BINARY_SENSOR, + Platform.BUTTON, Platform.MEDIA_PLAYER, Platform.SENSOR, ] diff --git a/homeassistant/components/squeezebox/button.py b/homeassistant/components/squeezebox/button.py new file mode 100644 index 00000000000..098df3a1b5c --- /dev/null +++ b/homeassistant/components/squeezebox/button.py @@ -0,0 +1,155 @@ +"""Platform for button integration for squeezebox.""" + +from __future__ import annotations + +from dataclasses import dataclass +import logging + +from homeassistant.components.button import ButtonEntity, ButtonEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import format_mac +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . import SqueezeboxConfigEntry +from .const import SIGNAL_PLAYER_DISCOVERED +from .coordinator import SqueezeBoxPlayerUpdateCoordinator +from .entity import SqueezeboxEntity + +_LOGGER = logging.getLogger(__name__) + +HARDWARE_MODELS_WITH_SCREEN = [ + "Squeezebox Boom", + "Squeezebox Radio", + "Transporter", + "Squeezebox Touch", + "Squeezebox", + "SliMP3", + "Squeezebox 1", + "Squeezebox 2", + "Squeezebox 3", +] + +HARDWARE_MODELS_WITH_TONE = [ + *HARDWARE_MODELS_WITH_SCREEN, + "Squeezebox Receiver", +] + + +@dataclass(frozen=True, kw_only=True) +class SqueezeboxButtonEntityDescription(ButtonEntityDescription): + """Squeezebox Button description.""" + + press_action: str + + +BUTTON_ENTITIES: tuple[SqueezeboxButtonEntityDescription, ...] = tuple( + SqueezeboxButtonEntityDescription( + key=f"preset_{i}", + translation_key="preset", + translation_placeholders={"index": str(i)}, + press_action=f"preset_{i}.single", + ) + for i in range(1, 7) +) + +SCREEN_BUTTON_ENTITIES: tuple[SqueezeboxButtonEntityDescription, ...] 
= ( + SqueezeboxButtonEntityDescription( + key="brightness_up", + translation_key="brightness_up", + press_action="brightness_up", + ), + SqueezeboxButtonEntityDescription( + key="brightness_down", + translation_key="brightness_down", + press_action="brightness_down", + ), +) + +TONE_BUTTON_ENTITIES: tuple[SqueezeboxButtonEntityDescription, ...] = ( + SqueezeboxButtonEntityDescription( + key="bass_up", + translation_key="bass_up", + press_action="bass_up", + ), + SqueezeboxButtonEntityDescription( + key="bass_down", + translation_key="bass_down", + press_action="bass_down", + ), + SqueezeboxButtonEntityDescription( + key="treble_up", + translation_key="treble_up", + press_action="treble_up", + ), + SqueezeboxButtonEntityDescription( + key="treble_down", + translation_key="treble_down", + press_action="treble_down", + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SqueezeboxConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up the Squeezebox button platform from a server config entry.""" + + # Add button entities when player discovered + async def _player_discovered( + player_coordinator: SqueezeBoxPlayerUpdateCoordinator, + ) -> None: + _LOGGER.debug( + "Setting up button entity for player %s, model %s", + player_coordinator.player.name, + player_coordinator.player.model, + ) + + entities: list[SqueezeboxButtonEntity] = [] + + entities.extend( + SqueezeboxButtonEntity(player_coordinator, description) + for description in BUTTON_ENTITIES + ) + + entities.extend( + SqueezeboxButtonEntity(player_coordinator, description) + for description in TONE_BUTTON_ENTITIES + if player_coordinator.player.model in HARDWARE_MODELS_WITH_TONE + ) + + entities.extend( + SqueezeboxButtonEntity(player_coordinator, description) + for description in SCREEN_BUTTON_ENTITIES + if player_coordinator.player.model in HARDWARE_MODELS_WITH_SCREEN + ) + + async_add_entities(entities) + + entry.async_on_unload( + async_dispatcher_connect(hass, SIGNAL_PLAYER_DISCOVERED, _player_discovered) + ) + + +class SqueezeboxButtonEntity(SqueezeboxEntity, ButtonEntity): + """Representation of Buttons for Squeezebox entities.""" + + entity_description: SqueezeboxButtonEntityDescription + + def __init__( + self, + coordinator: SqueezeBoxPlayerUpdateCoordinator, + entity_description: SqueezeboxButtonEntityDescription, + ) -> None: + """Initialize the SqueezeBox Button.""" + super().__init__(coordinator) + self.entity_description = entity_description + self._attr_unique_id = ( + f"{format_mac(self._player.player_id)}_{entity_description.key}" + ) + + async def async_press(self) -> None: + """Execute the button action.""" + await self._player.async_query("button", self.entity_description.press_action) diff --git a/homeassistant/components/squeezebox/config_flow.py b/homeassistant/components/squeezebox/config_flow.py index 2853ad14217..31dd5b003b7 100644 --- a/homeassistant/components/squeezebox/config_flow.py +++ b/homeassistant/components/squeezebox/config_flow.py @@ -151,7 +151,8 @@ class SqueezeboxConfigFlow(ConfigFlow, domain=DOMAIN): if server.http_status == HTTPStatus.UNAUTHORIZED: return "invalid_auth" return "cannot_connect" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unknown exception while validating connection") return "unknown" if "uuid" in status: diff --git a/homeassistant/components/squeezebox/entity.py b/homeassistant/components/squeezebox/entity.py index 027ca68edc6..2c443c24ffd 100644 --- 
a/homeassistant/components/squeezebox/entity.py +++ b/homeassistant/components/squeezebox/entity.py @@ -1,11 +1,37 @@ """Base class for Squeezebox Sensor entities.""" -from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.device_registry import ( + CONNECTION_NETWORK_MAC, + DeviceInfo, + format_mac, +) from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN, STATUS_QUERY_UUID -from .coordinator import LMSStatusDataUpdateCoordinator +from .coordinator import ( + LMSStatusDataUpdateCoordinator, + SqueezeBoxPlayerUpdateCoordinator, +) + + +class SqueezeboxEntity(CoordinatorEntity[SqueezeBoxPlayerUpdateCoordinator]): + """Base entity class for Squeezebox entities.""" + + _attr_has_entity_name = True + + def __init__(self, coordinator: SqueezeBoxPlayerUpdateCoordinator) -> None: + """Initialize the SqueezeBox entity.""" + super().__init__(coordinator) + self._player = coordinator.player + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, format_mac(self._player.player_id))}, + name=self._player.name, + connections={(CONNECTION_NETWORK_MAC, format_mac(self._player.player_id))}, + via_device=(DOMAIN, coordinator.server_uuid), + model=self._player.model, + manufacturer=self._player.creator, + ) class LMSStatusEntity(CoordinatorEntity[LMSStatusDataUpdateCoordinator]): diff --git a/homeassistant/components/squeezebox/media_player.py b/homeassistant/components/squeezebox/media_player.py index 1767d92730a..40662477745 100644 --- a/homeassistant/components/squeezebox/media_player.py +++ b/homeassistant/components/squeezebox/media_player.py @@ -35,15 +35,10 @@ from homeassistant.helpers import ( entity_platform, entity_registry as er, ) -from homeassistant.helpers.device_registry import ( - CONNECTION_NETWORK_MAC, - DeviceInfo, - format_mac, -) +from homeassistant.helpers.device_registry import format_mac from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.start import async_at_start -from homeassistant.helpers.update_coordinator import CoordinatorEntity from homeassistant.util.dt import utcnow from .browse_media import ( @@ -68,6 +63,7 @@ from .const import ( SQUEEZEBOX_SOURCE_STRINGS, ) from .coordinator import SqueezeBoxPlayerUpdateCoordinator +from .entity import SqueezeboxEntity if TYPE_CHECKING: from . import SqueezeboxConfigEntry @@ -181,9 +177,7 @@ def get_announce_timeout(extra: dict) -> int | None: return announce_timeout -class SqueezeBoxMediaPlayerEntity( - CoordinatorEntity[SqueezeBoxPlayerUpdateCoordinator], MediaPlayerEntity -): +class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity): """Representation of the media player features of a SqueezeBox device. Wraps a pysqueezebox.Player() object. 
@@ -217,30 +211,10 @@ class SqueezeBoxMediaPlayerEntity( def __init__(self, coordinator: SqueezeBoxPlayerUpdateCoordinator) -> None: """Initialize the SqueezeBox device.""" super().__init__(coordinator) - player = coordinator.player - self._player = player self._query_result: bool | dict = {} self._remove_dispatcher: Callable | None = None self._previous_media_position = 0 - self._attr_unique_id = format_mac(player.player_id) - _manufacturer = None - if player.model.startswith("SqueezeLite") or "SqueezePlay" in player.model: - _manufacturer = "Ralph Irving" - elif ( - "Squeezebox" in player.model - or "Transporter" in player.model - or "Slim" in player.model - ): - _manufacturer = "Logitech" - - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, self._attr_unique_id)}, - name=player.name, - connections={(CONNECTION_NETWORK_MAC, self._attr_unique_id)}, - via_device=(DOMAIN, coordinator.server_uuid), - model=player.model, - manufacturer=_manufacturer, - ) + self._attr_unique_id = format_mac(self._player.player_id) self._browse_data = BrowseData() @callback diff --git a/homeassistant/components/squeezebox/strings.json b/homeassistant/components/squeezebox/strings.json index ed569989b56..83c5d7dd5d0 100644 --- a/homeassistant/components/squeezebox/strings.json +++ b/homeassistant/components/squeezebox/strings.json @@ -63,6 +63,29 @@ } }, "entity": { + "button": { + "preset": { + "name": "Preset {index}" + }, + "brightness_up": { + "name": "Brightness up" + }, + "brightness_down": { + "name": "Brightness down" + }, + "bass_up": { + "name": "Bass up" + }, + "bass_down": { + "name": "Bass down" + }, + "treble_up": { + "name": "Treble up" + }, + "treble_down": { + "name": "Treble down" + } + }, "binary_sensor": { "rescan": { "name": "Library rescan" diff --git a/homeassistant/components/srp_energy/strings.json b/homeassistant/components/srp_energy/strings.json index eca4f465435..5fa97b00b57 100644 --- a/homeassistant/components/srp_energy/strings.json +++ b/homeassistant/components/srp_energy/strings.json @@ -3,10 +3,10 @@ "step": { "user": { "data": { - "id": "Account Id", + "id": "Account ID", "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", - "is_tou": "Is Time of Use Plan" + "is_tou": "Is Time-of-Use Price Plan" } } }, diff --git a/homeassistant/components/stookwijzer/diagnostics.py b/homeassistant/components/stookwijzer/diagnostics.py index 2849e0e976a..1f3ef4ee4ba 100644 --- a/homeassistant/components/stookwijzer/diagnostics.py +++ b/homeassistant/components/stookwijzer/diagnostics.py @@ -18,4 +18,5 @@ async def async_get_config_entry_diagnostics( "advice": client.advice, "air_quality_index": client.lki, "windspeed_ms": client.windspeed_ms, + "forecast": await client.async_get_forecast(), } diff --git a/homeassistant/components/stookwijzer/strings.json b/homeassistant/components/stookwijzer/strings.json index d7304fa1238..a028f1f19c5 100644 --- a/homeassistant/components/stookwijzer/strings.json +++ b/homeassistant/components/stookwijzer/strings.json @@ -29,7 +29,7 @@ }, "issues": { "location_migration_failed": { - "description": "The Stookwijzer integration was unable to automatically migrate your location to a new format the updated integrations uses.\n\nMake sure you are connected to the internet and restart Home Assistant to try again.\n\nIf this doesn't resolve the error, remove and re-add the integration.", + "description": "The Stookwijzer integration was unable to automatically migrate your location to a 
new format the updated integration uses.\n\nMake sure you are connected to the Internet and restart Home Assistant to try again.\n\nIf this doesn't resolve the error, remove and re-add the integration.", "title": "Migration of your location failed" } }, diff --git a/homeassistant/components/swiss_public_transport/config_flow.py b/homeassistant/components/swiss_public_transport/config_flow.py index 4dc6efc2e85..872044097d6 100644 --- a/homeassistant/components/swiss_public_transport/config_flow.py +++ b/homeassistant/components/swiss_public_transport/config_flow.py @@ -190,7 +190,7 @@ class SwissPublicTransportConfigFlow(ConfigFlow, domain=DOMAIN): return "cannot_connect" except OpendataTransportError: return "bad_config" - except Exception: # pylint: disable=broad-except + except Exception: _LOGGER.exception("Unknown error") return "unknown" return None diff --git a/homeassistant/components/switchbot/light.py b/homeassistant/components/switchbot/light.py index 0a2c342ecf0..4b9a7e1b988 100644 --- a/homeassistant/components/switchbot/light.py +++ b/homeassistant/components/switchbot/light.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import Any +from typing import Any, cast from switchbot import ColorMode as SwitchBotColorMode, SwitchbotBaseLight @@ -68,7 +68,9 @@ class SwitchbotLightEntity(SwitchbotEntity, LightEntity): async def async_turn_on(self, **kwargs: Any) -> None: """Instruct the light to turn on.""" - brightness = round(kwargs.get(ATTR_BRIGHTNESS, self.brightness) / 255 * 100) + brightness = round( + cast(int, kwargs.get(ATTR_BRIGHTNESS, self.brightness)) / 255 * 100 + ) if ( self.supported_color_modes diff --git a/homeassistant/components/switchbot/manifest.json b/homeassistant/components/switchbot/manifest.json index 567a33a8f43..d9f6f98d1fd 100644 --- a/homeassistant/components/switchbot/manifest.json +++ b/homeassistant/components/switchbot/manifest.json @@ -39,5 +39,5 @@ "documentation": "https://www.home-assistant.io/integrations/switchbot", "iot_class": "local_push", "loggers": ["switchbot"], - "requirements": ["PySwitchbot==0.56.1"] + "requirements": ["PySwitchbot==0.58.0"] } diff --git a/homeassistant/components/switchbot/sensor.py b/homeassistant/components/switchbot/sensor.py index 025c40bff9e..d68c913db15 100644 --- a/homeassistant/components/switchbot/sensor.py +++ b/homeassistant/components/switchbot/sensor.py @@ -11,6 +11,7 @@ from homeassistant.components.sensor import ( ) from homeassistant.const import ( CONCENTRATION_PARTS_PER_MILLION, + LIGHT_LUX, PERCENTAGE, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, EntityCategory, @@ -71,9 +72,14 @@ SENSOR_TYPES: dict[str, SensorEntityDescription] = { state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.HUMIDITY, ), + "illuminance": SensorEntityDescription( + key="illuminance", + native_unit_of_measurement=LIGHT_LUX, + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.ILLUMINANCE, + ), "temperature": SensorEntityDescription( key="temperature", - name=None, native_unit_of_measurement=UnitOfTemperature.CELSIUS, state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.TEMPERATURE, diff --git a/homeassistant/components/switcher_kis/strings.json b/homeassistant/components/switcher_kis/strings.json index e380711303d..c3cf111199f 100644 --- a/homeassistant/components/switcher_kis/strings.json +++ b/homeassistant/components/switcher_kis/strings.json @@ -9,13 +9,21 @@ "data": { "username": "[%key:common::config_flow::data::username%]", "token": 
"[%key:common::config_flow::data::access_token%]" + }, + "data_description": { + "username": "The email address used to sign in to the Switcher app.", + "token": "The local control token received from Switcher." } }, "reauth_confirm": { - "description": "Found a Switcher device that requires a token\nEnter your username and token\nFor more information see https://www.home-assistant.io/integrations/switcher_kis/#prerequisites", + "description": "[%key:component::switcher_kis::config::step::credentials::description%]", "data": { "username": "[%key:common::config_flow::data::username%]", "token": "[%key:common::config_flow::data::access_token%]" + }, + "data_description": { + "username": "[%key:component::switcher_kis::config::step::credentials::data_description::username%]", + "token": "[%key:component::switcher_kis::config::step::credentials::data_description::token%]" } } }, diff --git a/homeassistant/components/synology_dsm/__init__.py b/homeassistant/components/synology_dsm/__init__.py index 1b26b7df84d..d9319beb595 100644 --- a/homeassistant/components/synology_dsm/__init__.py +++ b/homeassistant/components/synology_dsm/__init__.py @@ -9,7 +9,6 @@ from synology_dsm.api.surveillance_station import SynoSurveillanceStation from synology_dsm.api.surveillance_station.camera import SynoCamera from synology_dsm.exceptions import SynologyDSMNotLoggedInException -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_MAC, CONF_SCAN_INTERVAL, CONF_VERIFY_SSL from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady @@ -31,15 +30,16 @@ from .const import ( from .coordinator import ( SynologyDSMCameraUpdateCoordinator, SynologyDSMCentralUpdateCoordinator, + SynologyDSMConfigEntry, + SynologyDSMData, SynologyDSMSwitchUpdateCoordinator, ) -from .models import SynologyDSMData from .service import async_setup_services _LOGGER = logging.getLogger(__name__) -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: SynologyDSMConfigEntry) -> bool: """Set up Synology DSM sensors.""" # Migrate device identifiers @@ -120,13 +120,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except SYNOLOGY_CONNECTION_EXCEPTIONS as ex: raise ConfigEntryNotReady from ex - synology_data = SynologyDSMData( + entry.runtime_data = SynologyDSMData( api=api, coordinator_central=coordinator_central, + coordinator_central_old_update_success=True, coordinator_cameras=coordinator_cameras, coordinator_switches=coordinator_switches, ) - hass.data.setdefault(DOMAIN, {})[entry.unique_id] = synology_data await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) entry.async_on_unload(entry.add_update_listener(_async_update_listener)) @@ -140,28 +140,42 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: entry.async_on_state_change(async_notify_backup_listeners) ) + def async_check_last_update_success() -> None: + if ( + last := coordinator_central.last_update_success + ) is not entry.runtime_data.coordinator_central_old_update_success: + entry.runtime_data.coordinator_central_old_update_success = last + async_notify_backup_listeners() + + entry.runtime_data.coordinator_central.async_add_listener( + async_check_last_update_success + ) + return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, entry: SynologyDSMConfigEntry 
+) -> bool: """Unload Synology DSM sensors.""" if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - entry_data: SynologyDSMData = hass.data[DOMAIN][entry.unique_id] + entry_data = entry.runtime_data await entry_data.api.async_unload() - hass.data[DOMAIN].pop(entry.unique_id) return unload_ok -async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def _async_update_listener( + hass: HomeAssistant, entry: SynologyDSMConfigEntry +) -> None: """Handle options update.""" await hass.config_entries.async_reload(entry.entry_id) async def async_remove_config_entry_device( - hass: HomeAssistant, entry: ConfigEntry, device_entry: dr.DeviceEntry + hass: HomeAssistant, entry: SynologyDSMConfigEntry, device_entry: dr.DeviceEntry ) -> bool: """Remove synology_dsm config entry from a device.""" - data: SynologyDSMData = hass.data[DOMAIN][entry.unique_id] + data = entry.runtime_data api = data.api assert api.information is not None serial = api.information.serial diff --git a/homeassistant/components/synology_dsm/backup.py b/homeassistant/components/synology_dsm/backup.py index c4b44542059..46e47ebde16 100644 --- a/homeassistant/components/synology_dsm/backup.py +++ b/homeassistant/components/synology_dsm/backup.py @@ -17,7 +17,6 @@ from homeassistant.components.backup import ( BackupNotFound, suggested_filename, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.aiohttp_client import ChunkAsyncStreamIterator from homeassistant.helpers.json import json_dumps @@ -29,7 +28,7 @@ from .const import ( DATA_BACKUP_AGENT_LISTENERS, DOMAIN, ) -from .models import SynologyDSMData +from .coordinator import SynologyDSMConfigEntry LOGGER = logging.getLogger(__name__) @@ -47,19 +46,19 @@ async def async_get_backup_agents( hass: HomeAssistant, ) -> list[BackupAgent]: """Return a list of backup agents.""" - if not ( - entries := hass.config_entries.async_loaded_entries(DOMAIN) - ) or not hass.data.get(DOMAIN): + entries: list[SynologyDSMConfigEntry] = hass.config_entries.async_loaded_entries( + DOMAIN + ) + if not entries: LOGGER.debug("No proper config entry found") return [] - syno_datas: dict[str, SynologyDSMData] = hass.data[DOMAIN] return [ SynologyDSMBackupAgent(hass, entry, entry.unique_id) for entry in entries if entry.unique_id is not None - and (syno_data := syno_datas.get(entry.unique_id)) - and syno_data.api.file_station + and entry.runtime_data.api.file_station and entry.options.get(CONF_BACKUP_PATH) + and entry.runtime_data.coordinator_central.last_update_success ] @@ -91,7 +90,9 @@ class SynologyDSMBackupAgent(BackupAgent): domain = DOMAIN - def __init__(self, hass: HomeAssistant, entry: ConfigEntry, unique_id: str) -> None: + def __init__( + self, hass: HomeAssistant, entry: SynologyDSMConfigEntry, unique_id: str + ) -> None: """Initialize the Synology DSM backup agent.""" super().__init__() LOGGER.debug("Initializing Synology DSM backup agent for %s", entry.unique_id) @@ -100,7 +101,7 @@ class SynologyDSMBackupAgent(BackupAgent): self.path = ( f"{entry.options[CONF_BACKUP_SHARE]}/{entry.options[CONF_BACKUP_PATH]}" ) - syno_data: SynologyDSMData = hass.data[DOMAIN][entry.unique_id] + syno_data = entry.runtime_data self.api = syno_data.api self.backup_base_names: dict[str, str] = {} diff --git a/homeassistant/components/synology_dsm/binary_sensor.py b/homeassistant/components/synology_dsm/binary_sensor.py index 2f7d041cb10..1ae5fa90760 100644 --- 
a/homeassistant/components/synology_dsm/binary_sensor.py +++ b/homeassistant/components/synology_dsm/binary_sensor.py @@ -12,20 +12,17 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_DISKS, EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from . import SynoApi -from .const import DOMAIN -from .coordinator import SynologyDSMCentralUpdateCoordinator +from .coordinator import SynologyDSMCentralUpdateCoordinator, SynologyDSMConfigEntry from .entity import ( SynologyDSMBaseEntity, SynologyDSMDeviceEntity, SynologyDSMEntityDescription, ) -from .models import SynologyDSMData @dataclass(frozen=True, kw_only=True) @@ -64,11 +61,11 @@ STORAGE_DISK_BINARY_SENSORS: tuple[SynologyDSMBinarySensorEntityDescription, ... async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: SynologyDSMConfigEntry, async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up the Synology NAS binary sensor.""" - data: SynologyDSMData = hass.data[DOMAIN][entry.unique_id] + data = entry.runtime_data api = data.api coordinator = data.coordinator_central assert api.storage is not None diff --git a/homeassistant/components/synology_dsm/button.py b/homeassistant/components/synology_dsm/button.py index 6512c370334..79297b1f1b4 100644 --- a/homeassistant/components/synology_dsm/button.py +++ b/homeassistant/components/synology_dsm/button.py @@ -12,7 +12,6 @@ from homeassistant.components.button import ( ButtonEntity, ButtonEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo @@ -20,7 +19,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from . 
import SynoApi from .const import DOMAIN -from .models import SynologyDSMData +from .coordinator import SynologyDSMConfigEntry LOGGER = logging.getLogger(__name__) @@ -52,11 +51,11 @@ BUTTONS: Final = [ async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: SynologyDSMConfigEntry, async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set buttons for device.""" - data: SynologyDSMData = hass.data[DOMAIN][entry.unique_id] + data = entry.runtime_data async_add_entities(SynologyDSMButton(data.api, button) for button in BUTTONS) diff --git a/homeassistant/components/synology_dsm/camera.py b/homeassistant/components/synology_dsm/camera.py index acbcccb8894..f393b8efb55 100644 --- a/homeassistant/components/synology_dsm/camera.py +++ b/homeassistant/components/synology_dsm/camera.py @@ -16,7 +16,6 @@ from homeassistant.components.camera import ( CameraEntityDescription, CameraEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.dispatcher import async_dispatcher_connect @@ -29,9 +28,8 @@ from .const import ( DOMAIN, SIGNAL_CAMERA_SOURCE_CHANGED, ) -from .coordinator import SynologyDSMCameraUpdateCoordinator +from .coordinator import SynologyDSMCameraUpdateCoordinator, SynologyDSMConfigEntry from .entity import SynologyDSMBaseEntity, SynologyDSMEntityDescription -from .models import SynologyDSMData _LOGGER = logging.getLogger(__name__) @@ -47,11 +45,11 @@ class SynologyDSMCameraEntityDescription( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: SynologyDSMConfigEntry, async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up the Synology NAS cameras.""" - data: SynologyDSMData = hass.data[DOMAIN][entry.unique_id] + data = entry.runtime_data if coordinator := data.coordinator_cameras: async_add_entities( SynoDSMCamera(data.api, coordinator, camera_id) diff --git a/homeassistant/components/synology_dsm/config_flow.py b/homeassistant/components/synology_dsm/config_flow.py index 58784862305..f0da6f8fe47 100644 --- a/homeassistant/components/synology_dsm/config_flow.py +++ b/homeassistant/components/synology_dsm/config_flow.py @@ -72,7 +72,7 @@ from .const import ( DOMAIN, SYNOLOGY_CONNECTION_EXCEPTIONS, ) -from .models import SynologyDSMData +from .coordinator import SynologyDSMConfigEntry _LOGGER = logging.getLogger(__name__) @@ -131,7 +131,7 @@ class SynologyDSMFlowHandler(ConfigFlow, domain=DOMAIN): @staticmethod @callback def async_get_options_flow( - config_entry: ConfigEntry, + config_entry: SynologyDSMConfigEntry, ) -> SynologyDSMOptionsFlowHandler: """Get the options flow for this handler.""" return SynologyDSMOptionsFlowHandler() @@ -444,6 +444,8 @@ class SynologyDSMFlowHandler(ConfigFlow, domain=DOMAIN): class SynologyDSMOptionsFlowHandler(OptionsFlow): """Handle a option flow.""" + config_entry: SynologyDSMConfigEntry + async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -451,7 +453,7 @@ class SynologyDSMOptionsFlowHandler(OptionsFlow): if user_input is not None: return self.async_create_entry(title="", data=user_input) - syno_data: SynologyDSMData = self.hass.data[DOMAIN][self.config_entry.unique_id] + syno_data = self.config_entry.runtime_data data_schema = vol.Schema( { diff --git a/homeassistant/components/synology_dsm/coordinator.py b/homeassistant/components/synology_dsm/coordinator.py index 
1b3e21090b8..dd97dedf65e 100644 --- a/homeassistant/components/synology_dsm/coordinator.py +++ b/homeassistant/components/synology_dsm/coordinator.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Awaitable, Callable, Coroutine +from dataclasses import dataclass from datetime import timedelta import logging from typing import Any, Concatenate @@ -28,6 +29,20 @@ from .const import ( _LOGGER = logging.getLogger(__name__) +@dataclass +class SynologyDSMData: + """Data for the synology_dsm integration.""" + + api: SynoApi + coordinator_central: SynologyDSMCentralUpdateCoordinator + coordinator_central_old_update_success: bool + coordinator_cameras: SynologyDSMCameraUpdateCoordinator | None + coordinator_switches: SynologyDSMSwitchUpdateCoordinator | None + + +type SynologyDSMConfigEntry = ConfigEntry[SynologyDSMData] + + def async_re_login_on_expired[_T: SynologyDSMUpdateCoordinator[Any], **_P, _R]( func: Callable[Concatenate[_T, _P], Awaitable[_R]], ) -> Callable[Concatenate[_T, _P], Coroutine[Any, Any, _R]]: @@ -57,12 +72,12 @@ def async_re_login_on_expired[_T: SynologyDSMUpdateCoordinator[Any], **_P, _R]( class SynologyDSMUpdateCoordinator[_DataT](DataUpdateCoordinator[_DataT]): """DataUpdateCoordinator base class for synology_dsm.""" - config_entry: ConfigEntry + config_entry: SynologyDSMConfigEntry def __init__( self, hass: HomeAssistant, - entry: ConfigEntry, + entry: SynologyDSMConfigEntry, api: SynoApi, update_interval: timedelta, ) -> None: @@ -85,7 +100,7 @@ class SynologyDSMSwitchUpdateCoordinator( def __init__( self, hass: HomeAssistant, - entry: ConfigEntry, + entry: SynologyDSMConfigEntry, api: SynoApi, ) -> None: """Initialize DataUpdateCoordinator for switch devices.""" @@ -116,7 +131,7 @@ class SynologyDSMCentralUpdateCoordinator(SynologyDSMUpdateCoordinator[None]): def __init__( self, hass: HomeAssistant, - entry: ConfigEntry, + entry: SynologyDSMConfigEntry, api: SynoApi, ) -> None: """Initialize DataUpdateCoordinator for central device.""" @@ -136,7 +151,7 @@ class SynologyDSMCameraUpdateCoordinator( def __init__( self, hass: HomeAssistant, - entry: ConfigEntry, + entry: SynologyDSMConfigEntry, api: SynoApi, ) -> None: """Initialize DataUpdateCoordinator for cameras.""" diff --git a/homeassistant/components/synology_dsm/diagnostics.py b/homeassistant/components/synology_dsm/diagnostics.py index b30955ae682..a673be23096 100644 --- a/homeassistant/components/synology_dsm/diagnostics.py +++ b/homeassistant/components/synology_dsm/diagnostics.py @@ -6,21 +6,20 @@ from typing import Any from homeassistant.components.camera import diagnostics as camera_diagnostics from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant -from .const import CONF_DEVICE_TOKEN, DOMAIN -from .models import SynologyDSMData +from .const import CONF_DEVICE_TOKEN +from .coordinator import SynologyDSMConfigEntry TO_REDACT = {CONF_USERNAME, CONF_PASSWORD, CONF_DEVICE_TOKEN} async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: SynologyDSMConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - data: SynologyDSMData = hass.data[DOMAIN][entry.unique_id] + data = entry.runtime_data syno_api = data.api dsm_info = syno_api.dsm.information diff --git a/homeassistant/components/synology_dsm/media_source.py 
b/homeassistant/components/synology_dsm/media_source.py index d35b262809c..6234f5e8dd0 100644 --- a/homeassistant/components/synology_dsm/media_source.py +++ b/homeassistant/components/synology_dsm/media_source.py @@ -2,6 +2,7 @@ from __future__ import annotations +from logging import getLogger import mimetypes from aiohttp import web @@ -22,7 +23,9 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from .const import DOMAIN, SHARED_SUFFIX -from .models import SynologyDSMData +from .coordinator import SynologyDSMConfigEntry, SynologyDSMData + +LOGGER = getLogger(__name__) async def async_get_media_source(hass: HomeAssistant) -> MediaSource: @@ -41,15 +44,13 @@ class SynologyPhotosMediaSourceIdentifier: """Split identifier into parts.""" parts = identifier.split("/") - self.unique_id = None + self.unique_id = parts[0] self.album_id = None self.cache_key = None self.file_name = None self.is_shared = False self.passphrase = "" - self.unique_id = parts[0] - if len(parts) > 1: album_parts = parts[1].split("_") self.album_id = album_parts[0] @@ -82,7 +83,7 @@ class SynologyPhotosMediaSource(MediaSource): item: MediaSourceItem, ) -> BrowseMediaSource: """Return media.""" - if not self.hass.data.get(DOMAIN): + if not self.hass.config_entries.async_loaded_entries(DOMAIN): raise BrowseError("Diskstation not initialized") return BrowseMediaSource( domain=DOMAIN, @@ -116,7 +117,13 @@ class SynologyPhotosMediaSource(MediaSource): for entry in self.entries ] identifier = SynologyPhotosMediaSourceIdentifier(item.identifier) - diskstation: SynologyDSMData = self.hass.data[DOMAIN][identifier.unique_id] + entry: SynologyDSMConfigEntry | None = ( + self.hass.config_entries.async_entry_for_domain_unique_id( + DOMAIN, identifier.unique_id + ) + ) + assert entry + diskstation = entry.runtime_data assert diskstation.api.photos is not None if identifier.album_id is None: @@ -244,7 +251,7 @@ class SynologyDsmMediaView(http.HomeAssistantView): self, request: web.Request, source_dir_id: str, location: str ) -> web.Response: """Start a GET request.""" - if not self.hass.data.get(DOMAIN): + if not self.hass.config_entries.async_loaded_entries(DOMAIN): raise web.HTTPNotFound # location: {cache_key}/{filename} cache_key, file_name, passphrase = location.split("/") @@ -257,7 +264,13 @@ class SynologyDsmMediaView(http.HomeAssistantView): if not isinstance(mime_type, str): raise web.HTTPNotFound - diskstation: SynologyDSMData = self.hass.data[DOMAIN][source_dir_id] + entry: SynologyDSMConfigEntry | None = ( + self.hass.config_entries.async_entry_for_domain_unique_id( + DOMAIN, source_dir_id + ) + ) + assert entry + diskstation = entry.runtime_data assert diskstation.api.photos is not None item = SynoPhotosItem(image_id, "", "", "", cache_key, "xl", shared, passphrase) try: diff --git a/homeassistant/components/synology_dsm/models.py b/homeassistant/components/synology_dsm/models.py deleted file mode 100644 index 4f51d329ded..00000000000 --- a/homeassistant/components/synology_dsm/models.py +++ /dev/null @@ -1,22 +0,0 @@ -"""The synology_dsm integration models.""" - -from __future__ import annotations - -from dataclasses import dataclass - -from .common import SynoApi -from .coordinator import ( - SynologyDSMCameraUpdateCoordinator, - SynologyDSMCentralUpdateCoordinator, - SynologyDSMSwitchUpdateCoordinator, -) - - -@dataclass -class SynologyDSMData: - """Data for the synology_dsm integration.""" - - api: SynoApi - coordinator_central: SynologyDSMCentralUpdateCoordinator - 
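media_source.py stops treating hass.data as a presence flag and instead resolves the owning config entry from the identifier's unique ID. A compact sketch of that lookup pattern, assuming a hypothetical serial string:

if not hass.config_entries.async_loaded_entries(DOMAIN):
    raise BrowseError("Diskstation not initialized")

entry: SynologyDSMConfigEntry | None = (
    hass.config_entries.async_entry_for_domain_unique_id(DOMAIN, "1234ABC")  # hypothetical serial
)
assert entry  # identifier is expected to reference a configured Diskstation
diskstation = entry.runtime_data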
coordinator_cameras: SynologyDSMCameraUpdateCoordinator | None - coordinator_switches: SynologyDSMSwitchUpdateCoordinator | None diff --git a/homeassistant/components/synology_dsm/repairs.py b/homeassistant/components/synology_dsm/repairs.py index 725e77a2593..8a4e47a32b5 100644 --- a/homeassistant/components/synology_dsm/repairs.py +++ b/homeassistant/components/synology_dsm/repairs.py @@ -11,7 +11,6 @@ import voluptuous as vol from homeassistant import data_entry_flow from homeassistant.components.repairs import ConfirmRepairFlow, RepairsFlow -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers import issue_registry as ir from homeassistant.helpers.selector import ( @@ -28,7 +27,7 @@ from .const import ( ISSUE_MISSING_BACKUP_SETUP, SYNOLOGY_CONNECTION_EXCEPTIONS, ) -from .models import SynologyDSMData +from .coordinator import SynologyDSMConfigEntry LOGGER = logging.getLogger(__name__) @@ -36,7 +35,7 @@ LOGGER = logging.getLogger(__name__) class MissingBackupSetupRepairFlow(RepairsFlow): """Handler for an issue fixing flow.""" - def __init__(self, entry: ConfigEntry, issue_id: str) -> None: + def __init__(self, entry: SynologyDSMConfigEntry, issue_id: str) -> None: """Create flow.""" self.entry = entry self.issue_id = issue_id @@ -59,7 +58,7 @@ class MissingBackupSetupRepairFlow(RepairsFlow): ) -> data_entry_flow.FlowResult: """Handle the confirm step of a fix flow.""" - syno_data: SynologyDSMData = self.hass.data[DOMAIN][self.entry.unique_id] + syno_data = self.entry.runtime_data if user_input is not None: self.hass.config_entries.async_update_entry( diff --git a/homeassistant/components/synology_dsm/sensor.py b/homeassistant/components/synology_dsm/sensor.py index 2987de7a7c7..566885e3989 100644 --- a/homeassistant/components/synology_dsm/sensor.py +++ b/homeassistant/components/synology_dsm/sensor.py @@ -16,7 +16,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_DISKS, PERCENTAGE, @@ -31,14 +30,13 @@ from homeassistant.helpers.typing import StateType from homeassistant.util.dt import utcnow from . import SynoApi -from .const import CONF_VOLUMES, DOMAIN, ENTITY_UNIT_LOAD -from .coordinator import SynologyDSMCentralUpdateCoordinator +from .const import CONF_VOLUMES, ENTITY_UNIT_LOAD +from .coordinator import SynologyDSMCentralUpdateCoordinator, SynologyDSMConfigEntry from .entity import ( SynologyDSMBaseEntity, SynologyDSMDeviceEntity, SynologyDSMEntityDescription, ) -from .models import SynologyDSMData @dataclass(frozen=True, kw_only=True) @@ -287,11 +285,11 @@ INFORMATION_SENSORS: tuple[SynologyDSMSensorEntityDescription, ...] 
= ( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: SynologyDSMConfigEntry, async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up the Synology NAS Sensor.""" - data: SynologyDSMData = hass.data[DOMAIN][entry.unique_id] + data = entry.runtime_data api = data.api coordinator = data.coordinator_central storage = api.storage diff --git a/homeassistant/components/synology_dsm/service.py b/homeassistant/components/synology_dsm/service.py index 366f7d4ba3a..40b6fd4bc30 100644 --- a/homeassistant/components/synology_dsm/service.py +++ b/homeassistant/components/synology_dsm/service.py @@ -3,13 +3,14 @@ from __future__ import annotations import logging +from typing import cast from synology_dsm.exceptions import SynologyDSMException from homeassistant.core import HomeAssistant, ServiceCall from .const import CONF_SERIAL, DOMAIN, SERVICE_REBOOT, SERVICE_SHUTDOWN, SERVICES -from .models import SynologyDSMData +from .coordinator import SynologyDSMConfigEntry LOGGER = logging.getLogger(__name__) @@ -19,11 +20,20 @@ async def async_setup_services(hass: HomeAssistant) -> None: async def service_handler(call: ServiceCall) -> None: """Handle service call.""" - serial = call.data.get(CONF_SERIAL) - dsm_devices = hass.data[DOMAIN] + serial: str | None = call.data.get(CONF_SERIAL) + entries: list[SynologyDSMConfigEntry] = ( + hass.config_entries.async_loaded_entries(DOMAIN) + ) + dsm_devices = { + cast(str, entry.unique_id): entry.runtime_data for entry in entries + } if serial: - dsm_device: SynologyDSMData = hass.data[DOMAIN][serial] + entry: SynologyDSMConfigEntry | None = ( + hass.config_entries.async_entry_for_domain_unique_id(DOMAIN, serial) + ) + assert entry + dsm_device = entry.runtime_data elif len(dsm_devices) == 1: dsm_device = next(iter(dsm_devices.values())) serial = next(iter(dsm_devices)) @@ -39,7 +49,7 @@ async def async_setup_services(hass: HomeAssistant) -> None: return if call.service in [SERVICE_REBOOT, SERVICE_SHUTDOWN]: - if serial not in hass.data[DOMAIN]: + if serial not in dsm_devices: LOGGER.error("DSM with specified serial %s not found", serial) return LOGGER.debug("%s DSM with serial %s", call.service, serial) @@ -50,7 +60,7 @@ async def async_setup_services(hass: HomeAssistant) -> None: ), call.service, ) - dsm_device = hass.data[DOMAIN][serial] + dsm_device = dsm_devices[serial] dsm_api = dsm_device.api try: await getattr(dsm_api, f"async_{call.service}")() diff --git a/homeassistant/components/synology_dsm/switch.py b/homeassistant/components/synology_dsm/switch.py index c4f1572ceea..91863ff3a26 100644 --- a/homeassistant/components/synology_dsm/switch.py +++ b/homeassistant/components/synology_dsm/switch.py @@ -9,16 +9,14 @@ from typing import Any from synology_dsm.api.surveillance_station import SynoSurveillanceStation from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from . 
import SynoApi from .const import DOMAIN -from .coordinator import SynologyDSMSwitchUpdateCoordinator +from .coordinator import SynologyDSMConfigEntry, SynologyDSMSwitchUpdateCoordinator from .entity import SynologyDSMBaseEntity, SynologyDSMEntityDescription -from .models import SynologyDSMData _LOGGER = logging.getLogger(__name__) @@ -41,11 +39,11 @@ SURVEILLANCE_SWITCH: tuple[SynologyDSMSwitchEntityDescription, ...] = ( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: SynologyDSMConfigEntry, async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up the Synology NAS switch.""" - data: SynologyDSMData = hass.data[DOMAIN][entry.unique_id] + data = entry.runtime_data if coordinator := data.coordinator_switches: assert coordinator.version is not None async_add_entities( diff --git a/homeassistant/components/synology_dsm/update.py b/homeassistant/components/synology_dsm/update.py index 71eed2d7f1f..3048a38cb9c 100644 --- a/homeassistant/components/synology_dsm/update.py +++ b/homeassistant/components/synology_dsm/update.py @@ -9,15 +9,12 @@ from synology_dsm.api.core.upgrade import SynoCoreUpgrade from yarl import URL from homeassistant.components.update import UpdateEntity, UpdateEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback -from .const import DOMAIN -from .coordinator import SynologyDSMCentralUpdateCoordinator +from .coordinator import SynologyDSMCentralUpdateCoordinator, SynologyDSMConfigEntry from .entity import SynologyDSMBaseEntity, SynologyDSMEntityDescription -from .models import SynologyDSMData @dataclass(frozen=True, kw_only=True) @@ -39,11 +36,11 @@ UPDATE_ENTITIES: Final = [ async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: SynologyDSMConfigEntry, async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up Synology DSM update entities.""" - data: SynologyDSMData = hass.data[DOMAIN][entry.unique_id] + data = entry.runtime_data async_add_entities( SynoDSMUpdateEntity(data.api, data.coordinator_central, description) for description in UPDATE_ENTITIES diff --git a/homeassistant/components/system_bridge/strings.json b/homeassistant/components/system_bridge/strings.json index ef7495ef74f..1c079c1ef0c 100644 --- a/homeassistant/components/system_bridge/strings.json +++ b/homeassistant/components/system_bridge/strings.json @@ -109,7 +109,7 @@ "message": "No data received from {host}" }, "process_not_found": { - "message": "Could not find process with id {id}." + "message": "Could not find process with ID {id}." }, "timeout": { "message": "A timeout occurred for {title} ({host})" @@ -120,7 +120,7 @@ }, "issues": { "unsupported_version": { - "title": "System Bridge Upgrade Required", + "title": "System Bridge upgrade required", "description": "Your version of System Bridge for host {host} is not supported.\n\nPlease upgrade to the latest version." 
} }, diff --git a/homeassistant/components/systemmonitor/manifest.json b/homeassistant/components/systemmonitor/manifest.json index bd16464b290..9302746aa17 100644 --- a/homeassistant/components/systemmonitor/manifest.json +++ b/homeassistant/components/systemmonitor/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/systemmonitor", "iot_class": "local_push", "loggers": ["psutil"], - "requirements": ["psutil-home-assistant==0.0.1", "psutil==6.1.1"], + "requirements": ["psutil-home-assistant==0.0.1", "psutil==7.0.0"], "single_config_entry": true } diff --git a/homeassistant/components/systemmonitor/strings.json b/homeassistant/components/systemmonitor/strings.json index fb8a318ff45..134fe390357 100644 --- a/homeassistant/components/systemmonitor/strings.json +++ b/homeassistant/components/systemmonitor/strings.json @@ -48,13 +48,13 @@ "name": "Last boot" }, "load_15m": { - "name": "Load (15m)" + "name": "Load (15 min)" }, "load_1m": { - "name": "Load (1m)" + "name": "Load (1 min)" }, "load_5m": { - "name": "Load (5m)" + "name": "Load (5 min)" }, "memory_free": { "name": "Memory free" diff --git a/homeassistant/components/tado/__init__.py b/homeassistant/components/tado/__init__.py index 4b0203acda3..d1994075f12 100644 --- a/homeassistant/components/tado/__init__.py +++ b/homeassistant/components/tado/__init__.py @@ -10,12 +10,17 @@ from PyTado.interface import Tado from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant, callback -from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady +from homeassistant.exceptions import ( + ConfigEntryAuthFailed, + ConfigEntryError, + ConfigEntryNotReady, +) from homeassistant.helpers import config_validation as cv from homeassistant.helpers.typing import ConfigType from .const import ( CONF_FALLBACK, + CONF_REFRESH_TOKEN, CONST_OVERLAY_MANUAL, CONST_OVERLAY_TADO_DEFAULT, CONST_OVERLAY_TADO_MODE, @@ -56,23 +61,34 @@ type TadoConfigEntry = ConfigEntry[TadoData] async def async_setup_entry(hass: HomeAssistant, entry: TadoConfigEntry) -> bool: """Set up Tado from a config entry.""" + if CONF_REFRESH_TOKEN not in entry.data: + raise ConfigEntryAuthFailed _async_import_options_from_data_if_missing(hass, entry) _LOGGER.debug("Setting up Tado connection") + _LOGGER.debug( + "Creating tado instance with refresh token: %s", + entry.data[CONF_REFRESH_TOKEN], + ) + + def create_tado_instance() -> tuple[Tado, str]: + """Create a Tado instance, this time with a previously obtained refresh token.""" + tado = Tado(saved_refresh_token=entry.data[CONF_REFRESH_TOKEN]) + return tado, tado.device_activation_status() + try: - tado = await hass.async_add_executor_job( - Tado, - entry.data[CONF_USERNAME], - entry.data[CONF_PASSWORD], - ) + tado, device_status = await hass.async_add_executor_job(create_tado_instance) except PyTado.exceptions.TadoWrongCredentialsException as err: raise ConfigEntryError(f"Invalid Tado credentials. Error: {err}") from err except PyTado.exceptions.TadoException as err: raise ConfigEntryNotReady(f"Error during Tado setup: {err}") from err - _LOGGER.debug( - "Tado connection established for username: %s", entry.data[CONF_USERNAME] - ) + if device_status != "COMPLETED": + raise ConfigEntryAuthFailed( + f"Device login flow status is {device_status}. Starting re-authentication." 
+ ) + + _LOGGER.debug("Tado connection established") coordinator = TadoDataUpdateCoordinator(hass, entry, tado) await coordinator.async_config_entry_first_refresh() @@ -82,11 +98,23 @@ async def async_setup_entry(hass: HomeAssistant, entry: TadoConfigEntry) -> bool entry.runtime_data = TadoData(coordinator, mobile_coordinator) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - entry.async_on_unload(entry.add_update_listener(update_listener)) return True +async def async_migrate_entry(hass: HomeAssistant, entry: TadoConfigEntry) -> bool: + """Migrate old entry.""" + + if entry.version < 2: + _LOGGER.debug("Migrating Tado entry to version 2. Current data: %s", entry.data) + data = dict(entry.data) + data.pop(CONF_USERNAME, None) + data.pop(CONF_PASSWORD, None) + hass.config_entries.async_update_entry(entry=entry, data=data, version=2) + _LOGGER.debug("Migration to version 2 successful") + return True + + @callback def _async_import_options_from_data_if_missing( hass: HomeAssistant, entry: TadoConfigEntry @@ -106,11 +134,6 @@ def _async_import_options_from_data_if_missing( hass.config_entries.async_update_entry(entry, options=options) -async def update_listener(hass: HomeAssistant, entry: TadoConfigEntry): - """Handle options update.""" - await hass.config_entries.async_reload(entry.entry_id) - - async def async_unload_entry(hass: HomeAssistant, entry: TadoConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/tado/climate.py b/homeassistant/components/tado/climate.py index e6aa921d428..6a2067ffff1 100644 --- a/homeassistant/components/tado/climate.py +++ b/homeassistant/components/tado/climate.py @@ -157,8 +157,8 @@ async def create_climate_entity( TADO_TO_HA_HVAC_MODE_MAP[CONST_MODE_OFF], TADO_TO_HA_HVAC_MODE_MAP[CONST_MODE_SMART_SCHEDULE], ] - supported_fan_modes = None - supported_swing_modes = None + supported_fan_modes: list[str] | None = None + supported_swing_modes: list[str] | None = None heat_temperatures = None cool_temperatures = None diff --git a/homeassistant/components/tado/config_flow.py b/homeassistant/components/tado/config_flow.py index f251a292800..64763469885 100644 --- a/homeassistant/components/tado/config_flow.py +++ b/homeassistant/components/tado/config_flow.py @@ -2,22 +2,25 @@ from __future__ import annotations +import asyncio +from collections.abc import Mapping import logging from typing import Any -import PyTado +from PyTado.exceptions import TadoException +from PyTado.http import DeviceActivationStatus from PyTado.interface import Tado -import requests.exceptions import voluptuous as vol +from yarl import URL from homeassistant.config_entries import ( + SOURCE_REAUTH, ConfigEntry, ConfigFlow, ConfigFlowResult, OptionsFlow, ) -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.service_info.zeroconf import ( ATTR_PROPERTIES_ID, @@ -26,137 +29,149 @@ from homeassistant.helpers.service_info.zeroconf import ( from .const import ( CONF_FALLBACK, + CONF_REFRESH_TOKEN, CONST_OVERLAY_TADO_DEFAULT, CONST_OVERLAY_TADO_OPTIONS, DOMAIN, - UNIQUE_ID, ) _LOGGER = logging.getLogger(__name__) -DATA_SCHEMA = vol.Schema( - { - vol.Required(CONF_USERNAME): str, - vol.Required(CONF_PASSWORD): str, - } -) - - -async def validate_input(hass: HomeAssistant, data: 
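The new async_migrate_entry only bumps the Tado entry to version 2 and drops the stored credentials; the refresh token is written later, once reauthentication succeeds. An illustrative before/after of entry.data (the concrete values are hypothetical):

before = {"username": "user@example.com", "password": "hunter2"}  # hypothetical version 1 data
after_migration: dict[str, str] = {}  # async_migrate_entry pops both keys and bumps the version to 2
after_reauth = {"refresh_token": "<token from tado.get_refresh_token()>"}  # written by the reauth flow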
dict[str, Any]) -> dict[str, Any]: - """Validate the user input allows us to connect. - - Data has the keys from DATA_SCHEMA with values provided by the user. - """ - - try: - tado = await hass.async_add_executor_job( - Tado, data[CONF_USERNAME], data[CONF_PASSWORD] - ) - tado_me = await hass.async_add_executor_job(tado.get_me) - except KeyError as ex: - raise InvalidAuth from ex - except RuntimeError as ex: - raise CannotConnect from ex - except requests.exceptions.HTTPError as ex: - if ex.response.status_code > 400 and ex.response.status_code < 500: - raise InvalidAuth from ex - raise CannotConnect from ex - - if "homes" not in tado_me or len(tado_me["homes"]) == 0: - raise NoHomes - - home = tado_me["homes"][0] - unique_id = str(home["id"]) - name = home["name"] - - return {"title": name, UNIQUE_ID: unique_id} - class TadoConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Tado.""" - VERSION = 1 + VERSION = 2 + login_task: asyncio.Task | None = None + refresh_token: str | None = None + tado: Tado | None = None + + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Handle reauth on credential failure.""" + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Prepare reauth.""" + if user_input is None: + return self.async_show_form(step_id="reauth_confirm") + + return await self.async_step_user() async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: - """Handle the initial step.""" - errors = {} - if user_input is not None: + """Handle users reauth credentials.""" + + if self.tado is None: + _LOGGER.debug("Initiating device activation") try: - validated = await validate_input(self.hass, user_input) - except CannotConnect: - errors["base"] = "cannot_connect" - except InvalidAuth: - errors["base"] = "invalid_auth" - except NoHomes: - errors["base"] = "no_homes" - except Exception: - _LOGGER.exception("Unexpected exception") - errors["base"] = "unknown" + self.tado = await self.hass.async_add_executor_job(Tado) + except TadoException: + _LOGGER.exception("Error while initiating Tado") + return self.async_abort(reason="cannot_connect") + assert self.tado is not None + tado_device_url = self.tado.device_verification_url() + user_code = URL(tado_device_url).query["user_code"] - if "base" not in errors: - await self.async_set_unique_id(validated[UNIQUE_ID]) - self._abort_if_unique_id_configured() - return self.async_create_entry( - title=validated["title"], data=user_input - ) + async def _wait_for_login() -> None: + """Wait for the user to login.""" + assert self.tado is not None + _LOGGER.debug("Waiting for device activation") + try: + await self.hass.async_add_executor_job(self.tado.device_activation) + except Exception as ex: + _LOGGER.exception("Error while waiting for device activation") + raise CannotConnect from ex - return self.async_show_form( - step_id="user", data_schema=DATA_SCHEMA, errors=errors + if ( + self.tado.device_activation_status() + is not DeviceActivationStatus.COMPLETED + ): + raise CannotConnect + + _LOGGER.debug("Checking login task") + if self.login_task is None: + _LOGGER.debug("Creating task for device activation") + self.login_task = self.hass.async_create_task(_wait_for_login()) + + if self.login_task.done(): + _LOGGER.debug("Login task is done, checking results") + if self.login_task.exception(): + return 
self.async_show_progress_done(next_step_id="timeout") + self.refresh_token = await self.hass.async_add_executor_job( + self.tado.get_refresh_token + ) + return self.async_show_progress_done(next_step_id="finish_login") + + return self.async_show_progress( + step_id="user", + progress_action="wait_for_device", + description_placeholders={ + "url": tado_device_url, + "code": user_code, + }, + progress_task=self.login_task, ) + async def async_step_finish_login( + self, + user_input: dict[str, Any] | None = None, + ) -> ConfigFlowResult: + """Handle the finalization of reauth.""" + _LOGGER.debug("Finalizing reauth") + assert self.tado is not None + tado_me = await self.hass.async_add_executor_job(self.tado.get_me) + + if "homes" not in tado_me or len(tado_me["homes"]) == 0: + return self.async_abort(reason="no_homes") + + home = tado_me["homes"][0] + unique_id = str(home["id"]) + name = home["name"] + + if self.source != SOURCE_REAUTH: + await self.async_set_unique_id(unique_id) + self._abort_if_unique_id_configured() + + return self.async_create_entry( + title=name, + data={CONF_REFRESH_TOKEN: self.refresh_token}, + ) + + self._abort_if_unique_id_mismatch(reason="reauth_account_mismatch") + return self.async_update_reload_and_abort( + self._get_reauth_entry(), + data={CONF_REFRESH_TOKEN: self.refresh_token}, + ) + + async def async_step_timeout( + self, + user_input: dict[str, Any] | None = None, + ) -> ConfigFlowResult: + """Handle issues that need transition await from progress step.""" + if user_input is None: + return self.async_show_form( + step_id="timeout", + ) + del self.login_task + return await self.async_step_user() + async def async_step_homekit( self, discovery_info: ZeroconfServiceInfo ) -> ConfigFlowResult: """Handle HomeKit discovery.""" self._async_abort_entries_match() properties = { - key.lower(): value for (key, value) in discovery_info.properties.items() + key.lower(): value for key, value in discovery_info.properties.items() } await self.async_set_unique_id(properties[ATTR_PROPERTIES_ID]) self._abort_if_unique_id_configured() return await self.async_step_user() - async def async_step_reconfigure( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle a reconfiguration flow initialized by the user.""" - errors: dict[str, str] = {} - reconfigure_entry = self._get_reconfigure_entry() - - if user_input is not None: - user_input[CONF_USERNAME] = reconfigure_entry.data[CONF_USERNAME] - try: - await validate_input(self.hass, user_input) - except CannotConnect: - errors["base"] = "cannot_connect" - except PyTado.exceptions.TadoWrongCredentialsException: - errors["base"] = "invalid_auth" - except NoHomes: - errors["base"] = "no_homes" - except Exception: # pylint: disable=broad-except - _LOGGER.exception("Unexpected exception") - errors["base"] = "unknown" - - if not errors: - return self.async_update_reload_and_abort( - reconfigure_entry, data_updates=user_input - ) - - return self.async_show_form( - step_id="reconfigure", - data_schema=vol.Schema( - { - vol.Required(CONF_PASSWORD): str, - } - ), - errors=errors, - description_placeholders={ - CONF_USERNAME: reconfigure_entry.data[CONF_USERNAME] - }, - ) - @staticmethod @callback def async_get_options_flow( @@ -173,8 +188,10 @@ class OptionsFlowHandler(OptionsFlow): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle options flow.""" - if user_input is not None: - return self.async_create_entry(data=user_input) + if user_input: + result = 
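The device-activation flow strings together a handful of PyTado calls; a hedged, synchronous outline of just those calls as they are used here (exact library semantics are not shown in this hunk):

from PyTado.http import DeviceActivationStatus
from PyTado.interface import Tado
from yarl import URL

tado = Tado()                                 # unauthenticated session
url = tado.device_verification_url()          # user opens this URL and signs in at Tado
user_code = URL(url).query["user_code"]       # shown to the user as a fallback
tado.device_activation()                      # blocks until the login completes or times out
if tado.device_activation_status() is DeviceActivationStatus.COMPLETED:
    refresh_token = tado.get_refresh_token()  # persisted as CONF_REFRESH_TOKEN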
self.async_create_entry(data=user_input) + await self.hass.config_entries.async_reload(self.config_entry.entry_id) + return result data_schema = vol.Schema( { @@ -191,11 +208,3 @@ class OptionsFlowHandler(OptionsFlow): class CannotConnect(HomeAssistantError): """Error to indicate we cannot connect.""" - - -class InvalidAuth(HomeAssistantError): - """Error to indicate there is invalid auth.""" - - -class NoHomes(HomeAssistantError): - """Error to indicate the account has no homes.""" diff --git a/homeassistant/components/tado/const.py b/homeassistant/components/tado/const.py index bdc4bff1943..7720ff09110 100644 --- a/homeassistant/components/tado/const.py +++ b/homeassistant/components/tado/const.py @@ -37,6 +37,7 @@ TADO_HVAC_ACTION_TO_HA_HVAC_ACTION = { # Configuration CONF_FALLBACK = "fallback" CONF_HOME_ID = "home_id" +CONF_REFRESH_TOKEN = "refresh_token" DATA = "data" # Weather diff --git a/homeassistant/components/tado/coordinator.py b/homeassistant/components/tado/coordinator.py index 559bc4a16fb..5f3aa1de1e4 100644 --- a/homeassistant/components/tado/coordinator.py +++ b/homeassistant/components/tado/coordinator.py @@ -10,7 +10,6 @@ from PyTado.interface import Tado from requests import RequestException from homeassistant.components.climate import PRESET_AWAY, PRESET_HOME -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -20,6 +19,7 @@ if TYPE_CHECKING: from .const import ( CONF_FALLBACK, + CONF_REFRESH_TOKEN, CONST_OVERLAY_TADO_DEFAULT, DOMAIN, INSIDE_TEMPERATURE_MEASUREMENT, @@ -58,8 +58,7 @@ class TadoDataUpdateCoordinator(DataUpdateCoordinator[dict[str, dict]]): update_interval=SCAN_INTERVAL, ) self._tado = tado - self._username = config_entry.data[CONF_USERNAME] - self._password = config_entry.data[CONF_PASSWORD] + self._refresh_token = config_entry.data[CONF_REFRESH_TOKEN] self._fallback = config_entry.options.get( CONF_FALLBACK, CONST_OVERLAY_TADO_DEFAULT ) @@ -108,6 +107,18 @@ class TadoDataUpdateCoordinator(DataUpdateCoordinator[dict[str, dict]]): self.data["weather"] = home["weather"] self.data["geofence"] = home["geofence"] + refresh_token = await self.hass.async_add_executor_job( + self._tado.get_refresh_token + ) + + if refresh_token != self._refresh_token: + _LOGGER.debug("New refresh token obtained from Tado: %s", refresh_token) + self._refresh_token = refresh_token + self.hass.config_entries.async_update_entry( + self.config_entry, + data={**self.config_entry.data, CONF_REFRESH_TOKEN: refresh_token}, + ) + return self.data async def _async_update_devices(self) -> dict[str, dict]: diff --git a/homeassistant/components/tado/helper.py b/homeassistant/components/tado/helper.py index 571a757a3e8..5c515e00cf0 100644 --- a/homeassistant/components/tado/helper.py +++ b/homeassistant/components/tado/helper.py @@ -53,13 +53,13 @@ def decide_duration( return duration -def generate_supported_fanmodes(tado_to_ha_mapping: dict[str, str], options: list[str]): +def generate_supported_fanmodes( + tado_to_ha_mapping: dict[str, str], options: list[str] +) -> list[str] | None: """Return correct list of fan modes or None.""" supported_fanmodes = [ - tado_to_ha_mapping.get(option) - for option in options - if tado_to_ha_mapping.get(option) is not None + val for option in options if (val := tado_to_ha_mapping.get(option)) is not None ] if not supported_fanmodes: return None diff --git 
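The rewritten generate_supported_fanmodes drops unmapped options with a walrus filter and still returns None when nothing survives. A small usage illustration (the mapping contents are hypothetical):

tado_to_ha = {"auto": "auto", "high": "high"}
generate_supported_fanmodes(tado_to_ha, ["auto", "high", "silent"])  # -> ["auto", "high"]
generate_supported_fanmodes(tado_to_ha, ["silent"])                  # -> None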
a/homeassistant/components/tado/manifest.json b/homeassistant/components/tado/manifest.json index b83e2695137..75ddbacc585 100644 --- a/homeassistant/components/tado/manifest.json +++ b/homeassistant/components/tado/manifest.json @@ -14,5 +14,5 @@ }, "iot_class": "cloud_polling", "loggers": ["PyTado"], - "requirements": ["python-tado==0.18.6"] + "requirements": ["python-tado==0.18.9"] } diff --git a/homeassistant/components/tado/strings.json b/homeassistant/components/tado/strings.json index ff1afc3c03d..c7aef7eb51c 100644 --- a/homeassistant/components/tado/strings.json +++ b/homeassistant/components/tado/strings.json @@ -1,33 +1,24 @@ { "config": { + "progress": { + "wait_for_device": "To authenticate, open the following URL and login at Tado:\n{url}\nIf the code is not automatically copied, paste the following code to authorize the integration:\n\n```{code}```\n\n\nThe login attempt will time out after five minutes." + }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", + "could_not_authenticate": "Could not authenticate with Tado.", + "no_homes": "There are no homes linked to this Tado account.", + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" }, "step": { - "user": { - "data": { - "password": "[%key:common::config_flow::data::password%]", - "username": "[%key:common::config_flow::data::username%]" - }, - "title": "Connect to your Tado account" + "reauth_confirm": { + "title": "Authenticate with Tado", + "description": "You need to reauthenticate with Tado. Press `Submit` to start the authentication process." }, - "reconfigure": { - "title": "Reconfigure your Tado", - "description": "Reconfigure the entry for your account: `{username}`.", - "data": { - "password": "[%key:common::config_flow::data::password%]" - }, - "data_description": { - "password": "Enter the (new) password for Tado." - } + "timeout": { + "description": "The authentication process timed out. Please try again." 
} - }, - "error": { - "unknown": "[%key:common::config_flow::error::unknown%]", - "no_homes": "There are no homes linked to this Tado account.", - "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" } }, "options": { diff --git a/homeassistant/components/tankerkoenig/config_flow.py b/homeassistant/components/tankerkoenig/config_flow.py index 8796ae46ab7..b269eaaaf55 100644 --- a/homeassistant/components/tankerkoenig/config_flow.py +++ b/homeassistant/components/tankerkoenig/config_flow.py @@ -39,7 +39,7 @@ from homeassistant.helpers.selector import ( NumberSelectorConfig, ) -from .const import CONF_FUEL_TYPES, CONF_STATIONS, DEFAULT_RADIUS, DOMAIN, FUEL_TYPES +from .const import CONF_STATIONS, DEFAULT_RADIUS, DOMAIN async def async_get_nearby_stations( @@ -175,10 +175,6 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): vol.Required( CONF_API_KEY, default=user_input.get(CONF_API_KEY, "") ): cv.string, - vol.Required( - CONF_FUEL_TYPES, - default=user_input.get(CONF_FUEL_TYPES, list(FUEL_TYPES)), - ): cv.multi_select(FUEL_TYPES), vol.Required( CONF_LOCATION, default=user_input.get( diff --git a/homeassistant/components/tankerkoenig/const.py b/homeassistant/components/tankerkoenig/const.py index c2a1dba9b6a..6761d20f4ce 100644 --- a/homeassistant/components/tankerkoenig/const.py +++ b/homeassistant/components/tankerkoenig/const.py @@ -3,14 +3,11 @@ DOMAIN = "tankerkoenig" NAME = "tankerkoenig" -CONF_FUEL_TYPES = "fuel_types" CONF_STATIONS = "stations" DEFAULT_RADIUS = 2 DEFAULT_SCAN_INTERVAL = 30 -FUEL_TYPES = {"e5": "Super", "e10": "Super E10", "diesel": "Diesel"} - ATTR_BRAND = "brand" ATTR_CITY = "city" ATTR_FUEL_TYPE = "fuel_type" diff --git a/homeassistant/components/tankerkoenig/coordinator.py b/homeassistant/components/tankerkoenig/coordinator.py index 1f73d0577b3..f1e6bc8c865 100644 --- a/homeassistant/components/tankerkoenig/coordinator.py +++ b/homeassistant/components/tankerkoenig/coordinator.py @@ -24,7 +24,7 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import CONF_FUEL_TYPES, CONF_STATIONS, DOMAIN +from .const import CONF_STATIONS, DOMAIN _LOGGER = logging.getLogger(__name__) @@ -54,7 +54,6 @@ class TankerkoenigDataUpdateCoordinator(DataUpdateCoordinator[dict[str, PriceInf self._selected_stations: list[str] = self.config_entry.data[CONF_STATIONS] self.stations: dict[str, Station] = {} - self.fuel_types: list[str] = self.config_entry.data[CONF_FUEL_TYPES] self.show_on_map: bool = self.config_entry.options[CONF_SHOW_ON_MAP] self._tankerkoenig = Tankerkoenig( diff --git a/homeassistant/components/tankerkoenig/strings.json b/homeassistant/components/tankerkoenig/strings.json index 29f4f439dd5..db620b2b11c 100644 --- a/homeassistant/components/tankerkoenig/strings.json +++ b/homeassistant/components/tankerkoenig/strings.json @@ -5,7 +5,6 @@ "data": { "name": "Region name", "api_key": "[%key:common::config_flow::data::api_key%]", - "fuel_types": "Fuel types", "location": "[%key:common::config_flow::data::location%]", "stations": "Additional fuel stations", "radius": "Search radius" diff --git a/homeassistant/components/telegram_bot/__init__.py b/homeassistant/components/telegram_bot/__init__.py index b3c09049ae5..15e1f7d4f0e 100644 --- a/homeassistant/components/telegram_bot/__init__.py +++ 
b/homeassistant/components/telegram_bot/__init__.py @@ -548,7 +548,7 @@ class TelegramNotificationService: """Initialize the service.""" self.allowed_chat_ids = allowed_chat_ids self._default_user = self.allowed_chat_ids[0] - self._last_message_id = {user: None for user in self.allowed_chat_ids} + self._last_message_id = dict.fromkeys(self.allowed_chat_ids) self._parsers = { PARSER_HTML: ParseMode.HTML, PARSER_MD: ParseMode.MARKDOWN, diff --git a/homeassistant/components/template/config.py b/homeassistant/components/template/config.py index 9963731c784..4e07d67f6e9 100644 --- a/homeassistant/components/template/config.py +++ b/homeassistant/components/template/config.py @@ -13,9 +13,11 @@ from homeassistant.components.blueprint import ( ) from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN from homeassistant.components.image import DOMAIN as IMAGE_DOMAIN +from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN from homeassistant.components.number import DOMAIN as NUMBER_DOMAIN from homeassistant.components.select import DOMAIN as SELECT_DOMAIN from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.components.weather import DOMAIN as WEATHER_DOMAIN from homeassistant.config import async_log_schema_error, config_without_domain from homeassistant.const import ( @@ -36,9 +38,11 @@ from . import ( binary_sensor as binary_sensor_platform, button as button_platform, image as image_platform, + light as light_platform, number as number_platform, select as select_platform, sensor as sensor_platform, + switch as switch_platform, weather as weather_platform, ) from .const import ( @@ -104,11 +108,19 @@ CONFIG_SECTION_SCHEMA = vol.Schema( vol.Optional(IMAGE_DOMAIN): vol.All( cv.ensure_list, [image_platform.IMAGE_SCHEMA] ), + vol.Optional(LIGHT_DOMAIN): vol.All( + cv.ensure_list, [light_platform.LIGHT_SCHEMA] + ), vol.Optional(WEATHER_DOMAIN): vol.All( cv.ensure_list, [weather_platform.WEATHER_SCHEMA] ), + vol.Optional(SWITCH_DOMAIN): vol.All( + cv.ensure_list, [switch_platform.SWITCH_SCHEMA] + ), }, - ensure_domains_do_not_have_trigger_or_action(BUTTON_DOMAIN), + ensure_domains_do_not_have_trigger_or_action( + BUTTON_DOMAIN, LIGHT_DOMAIN, SWITCH_DOMAIN + ), ) ) diff --git a/homeassistant/components/template/light.py b/homeassistant/components/template/light.py index 352f571078a..1cc47c74aa0 100644 --- a/homeassistant/components/template/light.py +++ b/homeassistant/components/template/light.py @@ -26,9 +26,13 @@ from homeassistant.components.light import ( filter_supported_color_modes, ) from homeassistant.const import ( + CONF_EFFECT, CONF_ENTITY_ID, CONF_FRIENDLY_NAME, CONF_LIGHTS, + CONF_NAME, + CONF_RGB, + CONF_STATE, CONF_UNIQUE_ID, CONF_VALUE_TEMPLATE, STATE_OFF, @@ -36,15 +40,18 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import TemplateError -from homeassistant.helpers import config_validation as cv +from homeassistant.helpers import config_validation as cv, template from homeassistant.helpers.entity import async_generate_entity_id from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.util import color as color_util -from .const import DOMAIN +from .const import CONF_OBJECT_ID, CONF_PICTURE, DOMAIN from .template_entity import ( + LEGACY_FIELDS as TEMPLATE_ENTITY_LEGACY_FIELDS, + 
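The telegram_bot change above is behaviour-preserving: dict.fromkeys with no default fills every key with None, exactly like the old comprehension. For example (the chat IDs are hypothetical):

allowed_chat_ids = [123456, 789012]
assert dict.fromkeys(allowed_chat_ids) == {user: None for user in allowed_chat_ids}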
TEMPLATE_ENTITY_AVAILABILITY_SCHEMA, TEMPLATE_ENTITY_COMMON_SCHEMA_LEGACY, + TEMPLATE_ENTITY_ICON_SCHEMA, TemplateEntity, rewrite_common_legacy_to_modern_conf, ) @@ -56,33 +63,96 @@ _VALID_STATES = [STATE_ON, STATE_OFF, "true", "false"] CONF_COLOR_ACTION = "set_color" CONF_COLOR_TEMPLATE = "color_template" +CONF_HS = "hs" CONF_HS_ACTION = "set_hs" CONF_HS_TEMPLATE = "hs_template" CONF_RGB_ACTION = "set_rgb" CONF_RGB_TEMPLATE = "rgb_template" +CONF_RGBW = "rgbw" CONF_RGBW_ACTION = "set_rgbw" CONF_RGBW_TEMPLATE = "rgbw_template" +CONF_RGBWW = "rgbww" CONF_RGBWW_ACTION = "set_rgbww" CONF_RGBWW_TEMPLATE = "rgbww_template" CONF_EFFECT_ACTION = "set_effect" +CONF_EFFECT_LIST = "effect_list" CONF_EFFECT_LIST_TEMPLATE = "effect_list_template" CONF_EFFECT_TEMPLATE = "effect_template" +CONF_LEVEL = "level" CONF_LEVEL_ACTION = "set_level" CONF_LEVEL_TEMPLATE = "level_template" +CONF_MAX_MIREDS = "max_mireds" CONF_MAX_MIREDS_TEMPLATE = "max_mireds_template" +CONF_MIN_MIREDS = "min_mireds" CONF_MIN_MIREDS_TEMPLATE = "min_mireds_template" CONF_OFF_ACTION = "turn_off" CONF_ON_ACTION = "turn_on" -CONF_SUPPORTS_TRANSITION = "supports_transition_template" +CONF_SUPPORTS_TRANSITION = "supports_transition" +CONF_SUPPORTS_TRANSITION_TEMPLATE = "supports_transition_template" CONF_TEMPERATURE_ACTION = "set_temperature" +CONF_TEMPERATURE = "temperature" CONF_TEMPERATURE_TEMPLATE = "temperature_template" CONF_WHITE_VALUE_ACTION = "set_white_value" +CONF_WHITE_VALUE = "white_value" CONF_WHITE_VALUE_TEMPLATE = "white_value_template" DEFAULT_MIN_MIREDS = 153 DEFAULT_MAX_MIREDS = 500 -LIGHT_SCHEMA = vol.All( +LEGACY_FIELDS = TEMPLATE_ENTITY_LEGACY_FIELDS | { + CONF_COLOR_ACTION: CONF_HS_ACTION, + CONF_COLOR_TEMPLATE: CONF_HS, + CONF_EFFECT_LIST_TEMPLATE: CONF_EFFECT_LIST, + CONF_EFFECT_TEMPLATE: CONF_EFFECT, + CONF_HS_TEMPLATE: CONF_HS, + CONF_LEVEL_TEMPLATE: CONF_LEVEL, + CONF_MAX_MIREDS_TEMPLATE: CONF_MAX_MIREDS, + CONF_MIN_MIREDS_TEMPLATE: CONF_MIN_MIREDS, + CONF_RGB_TEMPLATE: CONF_RGB, + CONF_RGBW_TEMPLATE: CONF_RGBW, + CONF_RGBWW_TEMPLATE: CONF_RGBWW, + CONF_SUPPORTS_TRANSITION_TEMPLATE: CONF_SUPPORTS_TRANSITION, + CONF_TEMPERATURE_TEMPLATE: CONF_TEMPERATURE, + CONF_VALUE_TEMPLATE: CONF_STATE, + CONF_WHITE_VALUE_TEMPLATE: CONF_WHITE_VALUE, +} + +DEFAULT_NAME = "Template Light" + +LIGHT_SCHEMA = ( + vol.Schema( + { + vol.Inclusive(CONF_EFFECT_ACTION, "effect"): cv.SCRIPT_SCHEMA, + vol.Inclusive(CONF_EFFECT_LIST, "effect"): cv.template, + vol.Inclusive(CONF_EFFECT, "effect"): cv.template, + vol.Optional(CONF_HS_ACTION): cv.SCRIPT_SCHEMA, + vol.Optional(CONF_HS): cv.template, + vol.Optional(CONF_LEVEL_ACTION): cv.SCRIPT_SCHEMA, + vol.Optional(CONF_LEVEL): cv.template, + vol.Optional(CONF_MAX_MIREDS): cv.template, + vol.Optional(CONF_MIN_MIREDS): cv.template, + vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.template, + vol.Optional(CONF_PICTURE): cv.template, + vol.Optional(CONF_RGB_ACTION): cv.SCRIPT_SCHEMA, + vol.Optional(CONF_RGB): cv.template, + vol.Optional(CONF_RGBW_ACTION): cv.SCRIPT_SCHEMA, + vol.Optional(CONF_RGBW): cv.template, + vol.Optional(CONF_RGBWW_ACTION): cv.SCRIPT_SCHEMA, + vol.Optional(CONF_RGBWW): cv.template, + vol.Optional(CONF_STATE): cv.template, + vol.Optional(CONF_SUPPORTS_TRANSITION): cv.template, + vol.Optional(CONF_TEMPERATURE_ACTION): cv.SCRIPT_SCHEMA, + vol.Optional(CONF_TEMPERATURE): cv.template, + vol.Optional(CONF_UNIQUE_ID): cv.string, + vol.Required(CONF_OFF_ACTION): cv.SCRIPT_SCHEMA, + vol.Required(CONF_ON_ACTION): cv.SCRIPT_SCHEMA, + } + ) + 
.extend(TEMPLATE_ENTITY_AVAILABILITY_SCHEMA.schema) + .extend(TEMPLATE_ENTITY_ICON_SCHEMA.schema) +) + +LEGACY_LIGHT_SCHEMA = vol.All( cv.deprecated(CONF_ENTITY_ID), vol.Schema( { @@ -107,7 +177,7 @@ LIGHT_SCHEMA = vol.All( vol.Optional(CONF_MIN_MIREDS_TEMPLATE): cv.template, vol.Required(CONF_OFF_ACTION): cv.SCRIPT_SCHEMA, vol.Required(CONF_ON_ACTION): cv.SCRIPT_SCHEMA, - vol.Optional(CONF_SUPPORTS_TRANSITION): cv.template, + vol.Optional(CONF_SUPPORTS_TRANSITION_TEMPLATE): cv.template, vol.Optional(CONF_TEMPERATURE_ACTION): cv.SCRIPT_SCHEMA, vol.Optional(CONF_TEMPERATURE_TEMPLATE): cv.template, vol.Optional(CONF_UNIQUE_ID): cv.string, @@ -121,29 +191,50 @@ PLATFORM_SCHEMA = vol.All( cv.removed(CONF_WHITE_VALUE_ACTION), cv.removed(CONF_WHITE_VALUE_TEMPLATE), LIGHT_PLATFORM_SCHEMA.extend( - {vol.Required(CONF_LIGHTS): cv.schema_with_slug_keys(LIGHT_SCHEMA)} + {vol.Required(CONF_LIGHTS): cv.schema_with_slug_keys(LEGACY_LIGHT_SCHEMA)} ), ) -async def _async_create_entities(hass: HomeAssistant, config): +def rewrite_legacy_to_modern_conf( + hass: HomeAssistant, config: dict[str, dict] +) -> list[dict]: + """Rewrite legacy switch configuration definitions to modern ones.""" + lights = [] + for object_id, entity_conf in config.items(): + entity_conf = {**entity_conf, CONF_OBJECT_ID: object_id} + + entity_conf = rewrite_common_legacy_to_modern_conf( + hass, entity_conf, LEGACY_FIELDS + ) + + if CONF_NAME not in entity_conf: + entity_conf[CONF_NAME] = template.Template(object_id, hass) + + lights.append(entity_conf) + + return lights + + +@callback +def _async_create_template_tracking_entities( + async_add_entities: AddEntitiesCallback, + hass: HomeAssistant, + definitions: list[dict], + unique_id_prefix: str | None, +) -> None: """Create the Template Lights.""" lights = [] - for object_id, entity_config in config[CONF_LIGHTS].items(): - entity_config = rewrite_common_legacy_to_modern_conf(hass, entity_config) - unique_id = entity_config.get(CONF_UNIQUE_ID) + for entity_conf in definitions: + unique_id = entity_conf.get(CONF_UNIQUE_ID) - lights.append( - LightTemplate( - hass, - object_id, - entity_config, - unique_id, - ) - ) + if unique_id and unique_id_prefix: + unique_id = f"{unique_id_prefix}-{unique_id}" - return lights + lights.append(LightTemplate(hass, entity_conf, unique_id)) + + async_add_entities(lights) async def async_setup_platform( @@ -153,7 +244,21 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the template lights.""" - async_add_entities(await _async_create_entities(hass, config)) + if discovery_info is None: + _async_create_template_tracking_entities( + async_add_entities, + hass, + rewrite_legacy_to_modern_conf(hass, config[CONF_LIGHTS]), + None, + ) + return + + _async_create_template_tracking_entities( + async_add_entities, + hass, + discovery_info["entities"], + discovery_info["unique_id"], + ) class LightTemplate(TemplateEntity, LightEntity): @@ -164,33 +269,30 @@ class LightTemplate(TemplateEntity, LightEntity): def __init__( self, hass: HomeAssistant, - object_id, config: dict[str, Any], - unique_id, + unique_id: str | None, ) -> None: """Initialize the light.""" - super().__init__( - hass, config=config, fallback_name=object_id, unique_id=unique_id - ) - self.entity_id = async_generate_entity_id( - ENTITY_ID_FORMAT, object_id, hass=hass - ) + super().__init__(hass, config=config, fallback_name=None, unique_id=unique_id) + if (object_id := config.get(CONF_OBJECT_ID)) is not None: + self.entity_id = 
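For template lights, the legacy object-keyed YAML schema is rewritten into the modern list form before entities are created. A hedged illustration of what rewrite_legacy_to_modern_conf does to one hypothetical legacy definition (value handling inside rewrite_common_legacy_to_modern_conf is not shown in this hunk):

legacy = {
    "kitchen": {
        "value_template": "{{ is_state('input_boolean.kitchen', 'on') }}",
        "level_template": "{{ 255 }}",
        "turn_on": [],
        "turn_off": [],
    }
}
modern = rewrite_legacy_to_modern_conf(hass, legacy)
# Each result dict gains CONF_OBJECT_ID ("kitchen"), the *_template keys are
# renamed via LEGACY_FIELDS (value_template -> state, level_template -> level),
# and CONF_NAME defaults to a Template("kitchen") when no name was given.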
async_generate_entity_id( + ENTITY_ID_FORMAT, object_id, hass=hass + ) name = self._attr_name if TYPE_CHECKING: assert name is not None - self._template = config.get(CONF_VALUE_TEMPLATE) - self._level_template = config.get(CONF_LEVEL_TEMPLATE) - self._temperature_template = config.get(CONF_TEMPERATURE_TEMPLATE) - self._color_template = config.get(CONF_COLOR_TEMPLATE) - self._hs_template = config.get(CONF_HS_TEMPLATE) - self._rgb_template = config.get(CONF_RGB_TEMPLATE) - self._rgbw_template = config.get(CONF_RGBW_TEMPLATE) - self._rgbww_template = config.get(CONF_RGBWW_TEMPLATE) - self._effect_list_template = config.get(CONF_EFFECT_LIST_TEMPLATE) - self._effect_template = config.get(CONF_EFFECT_TEMPLATE) - self._max_mireds_template = config.get(CONF_MAX_MIREDS_TEMPLATE) - self._min_mireds_template = config.get(CONF_MIN_MIREDS_TEMPLATE) + self._template = config.get(CONF_STATE) + self._level_template = config.get(CONF_LEVEL) + self._temperature_template = config.get(CONF_TEMPERATURE) + self._hs_template = config.get(CONF_HS) + self._rgb_template = config.get(CONF_RGB) + self._rgbw_template = config.get(CONF_RGBW) + self._rgbww_template = config.get(CONF_RGBWW) + self._effect_list_template = config.get(CONF_EFFECT_LIST) + self._effect_template = config.get(CONF_EFFECT) + self._max_mireds_template = config.get(CONF_MAX_MIREDS) + self._min_mireds_template = config.get(CONF_MIN_MIREDS) self._supports_transition_template = config.get(CONF_SUPPORTS_TRANSITION) for action_id in (CONF_ON_ACTION, CONF_OFF_ACTION, CONF_EFFECT_ACTION): @@ -216,7 +318,6 @@ class LightTemplate(TemplateEntity, LightEntity): for action_id, color_mode in ( (CONF_TEMPERATURE_ACTION, ColorMode.COLOR_TEMP), (CONF_LEVEL_ACTION, ColorMode.BRIGHTNESS), - (CONF_COLOR_ACTION, ColorMode.HS), (CONF_HS_ACTION, ColorMode.HS), (CONF_RGB_ACTION, ColorMode.RGB), (CONF_RGBW_ACTION, ColorMode.RGBW), @@ -349,14 +450,6 @@ class LightTemplate(TemplateEntity, LightEntity): self._update_temperature, none_on_template_error=True, ) - if self._color_template: - self.add_template_attribute( - "_hs_color", - self._color_template, - None, - self._update_hs, - none_on_template_error=True, - ) if self._hs_template: self.add_template_attribute( "_hs_color", @@ -440,7 +533,7 @@ class LightTemplate(TemplateEntity, LightEntity): ) self._color_mode = ColorMode.COLOR_TEMP self._temperature = color_temp - if self._hs_template is None and self._color_template is None: + if self._hs_template is None: self._hs_color = None if self._rgb_template is None: self._rgb_color = None @@ -450,11 +543,7 @@ class LightTemplate(TemplateEntity, LightEntity): self._rgbww_color = None optimistic_set = True - if ( - self._hs_template is None - and self._color_template is None - and ATTR_HS_COLOR in kwargs - ): + if self._hs_template is None and ATTR_HS_COLOR in kwargs: _LOGGER.debug( "Optimistically setting hs color to %s", kwargs[ATTR_HS_COLOR], @@ -480,7 +569,7 @@ class LightTemplate(TemplateEntity, LightEntity): self._rgb_color = kwargs[ATTR_RGB_COLOR] if self._temperature_template is None: self._temperature = None - if self._hs_template is None and self._color_template is None: + if self._hs_template is None: self._hs_color = None if self._rgbw_template is None: self._rgbw_color = None @@ -497,7 +586,7 @@ class LightTemplate(TemplateEntity, LightEntity): self._rgbw_color = kwargs[ATTR_RGBW_COLOR] if self._temperature_template is None: self._temperature = None - if self._hs_template is None and self._color_template is None: + if self._hs_template is None: self._hs_color = 
None if self._rgb_template is None: self._rgb_color = None @@ -514,7 +603,7 @@ class LightTemplate(TemplateEntity, LightEntity): self._rgbww_color = kwargs[ATTR_RGBWW_COLOR] if self._temperature_template is None: self._temperature = None - if self._hs_template is None and self._color_template is None: + if self._hs_template is None: self._hs_color = None if self._rgb_template is None: self._rgb_color = None @@ -561,17 +650,6 @@ class LightTemplate(TemplateEntity, LightEntity): await self.async_run_script( effect_script, run_variables=common_params, context=self._context ) - elif ATTR_HS_COLOR in kwargs and ( - color_script := self._action_scripts.get(CONF_COLOR_ACTION) - ): - hs_value = kwargs[ATTR_HS_COLOR] - common_params["hs"] = hs_value - common_params["h"] = int(hs_value[0]) - common_params["s"] = int(hs_value[1]) - - await self.async_run_script( - color_script, run_variables=common_params, context=self._context - ) elif ATTR_HS_COLOR in kwargs and ( hs_script := self._action_scripts.get(CONF_HS_ACTION) ): diff --git a/homeassistant/components/template/switch.py b/homeassistant/components/template/switch.py index feaabc3b17c..b76fc28b83c 100644 --- a/homeassistant/components/template/switch.py +++ b/homeassistant/components/template/switch.py @@ -17,6 +17,7 @@ from homeassistant.const import ( ATTR_FRIENDLY_NAME, CONF_DEVICE_ID, CONF_NAME, + CONF_STATE, CONF_SWITCHES, CONF_UNIQUE_ID, CONF_VALUE_TEMPLATE, @@ -25,7 +26,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import TemplateError -from homeassistant.helpers import config_validation as cv, selector +from homeassistant.helpers import config_validation as cv, selector, template from homeassistant.helpers.device import async_device_info_to_link_from_device_id from homeassistant.helpers.entity import async_generate_entity_id from homeassistant.helpers.entity_platform import ( @@ -35,16 +36,41 @@ from homeassistant.helpers.entity_platform import ( from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from .const import CONF_TURN_OFF, CONF_TURN_ON, DOMAIN +from .const import CONF_OBJECT_ID, CONF_PICTURE, CONF_TURN_OFF, CONF_TURN_ON, DOMAIN from .template_entity import ( + LEGACY_FIELDS as TEMPLATE_ENTITY_LEGACY_FIELDS, + TEMPLATE_ENTITY_AVAILABILITY_SCHEMA, TEMPLATE_ENTITY_COMMON_SCHEMA_LEGACY, + TEMPLATE_ENTITY_ICON_SCHEMA, TemplateEntity, rewrite_common_legacy_to_modern_conf, ) _VALID_STATES = [STATE_ON, STATE_OFF, "true", "false"] -SWITCH_SCHEMA = vol.All( +LEGACY_FIELDS = TEMPLATE_ENTITY_LEGACY_FIELDS | { + CONF_VALUE_TEMPLATE: CONF_STATE, +} + +DEFAULT_NAME = "Template Switch" + + +SWITCH_SCHEMA = ( + vol.Schema( + { + vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.template, + vol.Optional(CONF_STATE): cv.template, + vol.Required(CONF_TURN_ON): cv.SCRIPT_SCHEMA, + vol.Required(CONF_TURN_OFF): cv.SCRIPT_SCHEMA, + vol.Optional(CONF_UNIQUE_ID): cv.string, + vol.Optional(CONF_PICTURE): cv.template, + } + ) + .extend(TEMPLATE_ENTITY_AVAILABILITY_SCHEMA.schema) + .extend(TEMPLATE_ENTITY_ICON_SCHEMA.schema) +) + +LEGACY_SWITCH_SCHEMA = vol.All( cv.deprecated(ATTR_ENTITY_ID), vol.Schema( { @@ -59,13 +85,13 @@ SWITCH_SCHEMA = vol.All( ) PLATFORM_SCHEMA = SWITCH_PLATFORM_SCHEMA.extend( - {vol.Required(CONF_SWITCHES): cv.schema_with_slug_keys(SWITCH_SCHEMA)} + {vol.Required(CONF_SWITCHES): cv.schema_with_slug_keys(LEGACY_SWITCH_SCHEMA)} ) SWITCH_CONFIG_SCHEMA = vol.Schema( { 
vol.Required(CONF_NAME): cv.template, - vol.Optional(CONF_VALUE_TEMPLATE): cv.template, + vol.Optional(CONF_STATE): cv.template, vol.Optional(CONF_TURN_ON): cv.SCRIPT_SCHEMA, vol.Optional(CONF_TURN_OFF): cv.SCRIPT_SCHEMA, vol.Optional(CONF_DEVICE_ID): selector.DeviceSelector(), @@ -73,24 +99,62 @@ SWITCH_CONFIG_SCHEMA = vol.Schema( ) -async def _async_create_entities(hass: HomeAssistant, config: ConfigType): - """Create the Template switches.""" +def rewrite_legacy_to_modern_conf( + hass: HomeAssistant, config: dict[str, dict] +) -> list[dict]: + """Rewrite legacy switch configuration definitions to modern ones.""" switches = [] - for object_id, entity_config in config[CONF_SWITCHES].items(): - entity_config = rewrite_common_legacy_to_modern_conf(hass, entity_config) - unique_id = entity_config.get(CONF_UNIQUE_ID) + for object_id, entity_conf in config.items(): + entity_conf = {**entity_conf, CONF_OBJECT_ID: object_id} + + entity_conf = rewrite_common_legacy_to_modern_conf( + hass, entity_conf, LEGACY_FIELDS + ) + + if CONF_NAME not in entity_conf: + entity_conf[CONF_NAME] = template.Template(object_id, hass) + + switches.append(entity_conf) + + return switches + + +def rewrite_options_to_moder_conf(option_config: dict[str, dict]) -> dict[str, dict]: + """Rewrite option configuration to modern configuration.""" + option_config = {**option_config} + + if CONF_VALUE_TEMPLATE in option_config: + option_config[CONF_STATE] = option_config.pop(CONF_VALUE_TEMPLATE) + + return option_config + + +@callback +def _async_create_template_tracking_entities( + async_add_entities: AddEntitiesCallback, + hass: HomeAssistant, + definitions: list[dict], + unique_id_prefix: str | None, +) -> None: + """Create the template switches.""" + switches = [] + + for entity_conf in definitions: + unique_id = entity_conf.get(CONF_UNIQUE_ID) + + if unique_id and unique_id_prefix: + unique_id = f"{unique_id_prefix}-{unique_id}" switches.append( SwitchTemplate( hass, - object_id, - entity_config, + entity_conf, unique_id, ) ) - return switches + async_add_entities(switches) async def async_setup_platform( @@ -100,7 +164,21 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the template switches.""" - async_add_entities(await _async_create_entities(hass, config)) + if discovery_info is None: + _async_create_template_tracking_entities( + async_add_entities, + hass, + rewrite_legacy_to_modern_conf(hass, config[CONF_SWITCHES]), + None, + ) + return + + _async_create_template_tracking_entities( + async_add_entities, + hass, + discovery_info["entities"], + discovery_info["unique_id"], + ) async def async_setup_entry( @@ -111,10 +189,9 @@ async def async_setup_entry( """Initialize config entry.""" _options = dict(config_entry.options) _options.pop("template_type") + _options = rewrite_options_to_moder_conf(_options) validated_config = SWITCH_CONFIG_SCHEMA(_options) - async_add_entities( - [SwitchTemplate(hass, None, validated_config, config_entry.entry_id)] - ) + async_add_entities([SwitchTemplate(hass, validated_config, config_entry.entry_id)]) @callback @@ -123,7 +200,7 @@ def async_create_preview_switch( ) -> SwitchTemplate: """Create a preview switch.""" validated_config = SWITCH_CONFIG_SCHEMA(config | {CONF_NAME: name}) - return SwitchTemplate(hass, None, validated_config, None) + return SwitchTemplate(hass, validated_config, None) class SwitchTemplate(TemplateEntity, SwitchEntity, RestoreEntity): @@ -134,22 +211,19 @@ class SwitchTemplate(TemplateEntity, SwitchEntity, 
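Config-entry based template switches keep storing value_template in their options, so rewrite_options_to_moder_conf translates that single key before SWITCH_CONFIG_SCHEMA validation. For example (the template string is hypothetical):

options = {"value_template": "{{ is_state('sensor.door', 'open') }}", "turn_on": [], "turn_off": []}
rewrite_options_to_moder_conf(options)
# -> {"turn_on": [], "turn_off": [], "state": "{{ is_state('sensor.door', 'open') }}"}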
RestoreEntity): def __init__( self, hass: HomeAssistant, - object_id, config: ConfigType, - unique_id, + unique_id: str | None, ) -> None: """Initialize the Template switch.""" - super().__init__( - hass, config=config, fallback_name=object_id, unique_id=unique_id - ) - if object_id is not None: + super().__init__(hass, config=config, fallback_name=None, unique_id=unique_id) + if (object_id := config.get(CONF_OBJECT_ID)) is not None: self.entity_id = async_generate_entity_id( ENTITY_ID_FORMAT, object_id, hass=hass ) name = self._attr_name if TYPE_CHECKING: assert name is not None - self._template = config.get(CONF_VALUE_TEMPLATE) + self._template = config.get(CONF_STATE) if on_action := config.get(CONF_TURN_ON): self.add_script(CONF_TURN_ON, on_action, name, DOMAIN) diff --git a/homeassistant/components/template/template_entity.py b/homeassistant/components/template/template_entity.py index 93ba1fa7471..88708278758 100644 --- a/homeassistant/components/template/template_entity.py +++ b/homeassistant/components/template/template_entity.py @@ -248,7 +248,7 @@ class _TemplateAttribute: return -class TemplateEntity(AbstractTemplateEntity): # pylint: disable=hass-enforce-class-module +class TemplateEntity(AbstractTemplateEntity): """Entity that uses templates to calculate attributes.""" _attr_available = True diff --git a/homeassistant/components/tesla_fleet/__init__.py b/homeassistant/components/tesla_fleet/__init__.py index 27bfb9134ab..2642bd2f7d5 100644 --- a/homeassistant/components/tesla_fleet/__init__.py +++ b/homeassistant/components/tesla_fleet/__init__.py @@ -5,12 +5,7 @@ from typing import Final from aiohttp.client_exceptions import ClientResponseError import jwt -from tesla_fleet_api import ( - EnergySpecific, - TeslaFleetApi, - VehicleSigned, - VehicleSpecific, -) +from tesla_fleet_api import TeslaFleetApi from tesla_fleet_api.const import Scope from tesla_fleet_api.exceptions import ( InvalidRegion, @@ -128,7 +123,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - vehicles: list[TeslaFleetVehicleData] = [] energysites: list[TeslaFleetEnergyData] = [] for product in products: - if "vin" in product and hasattr(tesla, "vehicle"): + if "vin" in product and Scope.VEHICLE_DEVICE_DATA in scopes: # Remove the protobuff 'cached_data' that we do not use to save memory product.pop("cached_data", None) vin = product["vin"] @@ -136,9 +131,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - if signing: if not tesla.private_key: await tesla.get_private_key(hass.config.path("tesla_fleet.key")) - api = VehicleSigned(tesla.vehicle, vin) + api = tesla.vehicles.createSigned(vin) else: - api = VehicleSpecific(tesla.vehicle, vin) + api = tesla.vehicles.createFleet(vin) coordinator = TeslaFleetVehicleDataCoordinator(hass, entry, api, product) await coordinator.async_config_entry_first_refresh() @@ -160,7 +155,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - signing=signing, ) ) - elif "energy_site_id" in product and hasattr(tesla, "energy"): + elif "energy_site_id" in product and Scope.ENERGY_DEVICE_DATA in scopes: site_id = product["energy_site_id"] if not ( product["components"]["battery"] @@ -173,7 +168,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - ) continue - api = EnergySpecific(tesla.energy, site_id) + api = tesla.energySites.create(site_id) live_coordinator = TeslaFleetEnergySiteLiveCoordinator(hass, entry, api) history_coordinator = 
TeslaFleetEnergySiteHistoryCoordinator( @@ -227,7 +222,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - # Setup Platforms entry.runtime_data = TeslaFleetData(vehicles, energysites, scopes) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - return True diff --git a/homeassistant/components/tesla_fleet/coordinator.py b/homeassistant/components/tesla_fleet/coordinator.py index 128c15068f6..50a69258a31 100644 --- a/homeassistant/components/tesla_fleet/coordinator.py +++ b/homeassistant/components/tesla_fleet/coordinator.py @@ -7,7 +7,6 @@ from random import randint from time import time from typing import TYPE_CHECKING, Any -from tesla_fleet_api import EnergySpecific, VehicleSpecific from tesla_fleet_api.const import TeslaEnergyPeriod, VehicleDataEndpoint from tesla_fleet_api.exceptions import ( InvalidToken, @@ -17,6 +16,7 @@ from tesla_fleet_api.exceptions import ( TeslaFleetError, VehicleOffline, ) +from tesla_fleet_api.tesla import EnergySite, VehicleFleet from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed @@ -70,7 +70,7 @@ class TeslaFleetVehicleDataCoordinator(DataUpdateCoordinator[dict[str, Any]]): self, hass: HomeAssistant, config_entry: TeslaFleetConfigEntry, - api: VehicleSpecific, + api: VehicleFleet, product: dict, ) -> None: """Initialize TeslaFleet Vehicle Update Coordinator.""" @@ -149,7 +149,7 @@ class TeslaFleetEnergySiteLiveCoordinator(DataUpdateCoordinator[dict[str, Any]]) self, hass: HomeAssistant, config_entry: TeslaFleetConfigEntry, - api: EnergySpecific, + api: EnergySite, ) -> None: """Initialize TeslaFleet Energy Site Live coordinator.""" super().__init__( @@ -202,7 +202,7 @@ class TeslaFleetEnergySiteHistoryCoordinator(DataUpdateCoordinator[dict[str, Any self, hass: HomeAssistant, config_entry: TeslaFleetConfigEntry, - api: EnergySpecific, + api: EnergySite, ) -> None: """Initialize Tesla Fleet Energy Site History coordinator.""" super().__init__( @@ -248,7 +248,7 @@ class TeslaFleetEnergySiteHistoryCoordinator(DataUpdateCoordinator[dict[str, Any self.updated_once = True # Add all time periods together - output = {key: 0 for key in ENERGY_HISTORY_FIELDS} + output = dict.fromkeys(ENERGY_HISTORY_FIELDS, 0) for period in data.get("time_series", []): for key in ENERGY_HISTORY_FIELDS: output[key] += period.get(key, 0) @@ -266,7 +266,7 @@ class TeslaFleetEnergySiteInfoCoordinator(DataUpdateCoordinator[dict[str, Any]]) self, hass: HomeAssistant, config_entry: TeslaFleetConfigEntry, - api: EnergySpecific, + api: EnergySite, product: dict, ) -> None: """Initialize TeslaFleet Energy Info coordinator.""" diff --git a/homeassistant/components/tesla_fleet/entity.py b/homeassistant/components/tesla_fleet/entity.py index 0260acf368e..583e92595d0 100644 --- a/homeassistant/components/tesla_fleet/entity.py +++ b/homeassistant/components/tesla_fleet/entity.py @@ -3,8 +3,9 @@ from abc import abstractmethod from typing import Any -from tesla_fleet_api import EnergySpecific, VehicleSpecific from tesla_fleet_api.const import Scope +from tesla_fleet_api.tesla.energysite import EnergySite +from tesla_fleet_api.tesla.vehicle.fleet import VehicleFleet from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers.device_registry import DeviceInfo @@ -41,7 +42,7 @@ class TeslaFleetEntity( | TeslaFleetEnergySiteLiveCoordinator | TeslaFleetEnergySiteHistoryCoordinator | TeslaFleetEnergySiteInfoCoordinator, - api: VehicleSpecific | EnergySpecific, + api: 
VehicleFleet | EnergySite, key: str, ) -> None: """Initialize common aspects of a TeslaFleet entity.""" diff --git a/homeassistant/components/tesla_fleet/manifest.json b/homeassistant/components/tesla_fleet/manifest.json index 53aff3d0a54..56dc49ad111 100644 --- a/homeassistant/components/tesla_fleet/manifest.json +++ b/homeassistant/components/tesla_fleet/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/tesla_fleet", "iot_class": "cloud_polling", "loggers": ["tesla-fleet-api"], - "requirements": ["tesla-fleet-api==0.9.12"] + "requirements": ["tesla-fleet-api==1.0.16"] } diff --git a/homeassistant/components/tesla_fleet/models.py b/homeassistant/components/tesla_fleet/models.py index 469ebdca914..17a2bf50ed1 100644 --- a/homeassistant/components/tesla_fleet/models.py +++ b/homeassistant/components/tesla_fleet/models.py @@ -5,8 +5,8 @@ from __future__ import annotations import asyncio from dataclasses import dataclass -from tesla_fleet_api import EnergySpecific, VehicleSpecific from tesla_fleet_api.const import Scope +from tesla_fleet_api.tesla import EnergySite, VehicleFleet from homeassistant.helpers.device_registry import DeviceInfo @@ -31,7 +31,7 @@ class TeslaFleetData: class TeslaFleetVehicleData: """Data for a vehicle in the TeslaFleet integration.""" - api: VehicleSpecific + api: VehicleFleet coordinator: TeslaFleetVehicleDataCoordinator vin: str device: DeviceInfo @@ -43,7 +43,7 @@ class TeslaFleetVehicleData: class TeslaFleetEnergyData: """Data for a vehicle in the TeslaFleet integration.""" - api: EnergySpecific + api: EnergySite live_coordinator: TeslaFleetEnergySiteLiveCoordinator history_coordinator: TeslaFleetEnergySiteHistoryCoordinator info_coordinator: TeslaFleetEnergySiteInfoCoordinator diff --git a/homeassistant/components/tesla_fleet/number.py b/homeassistant/components/tesla_fleet/number.py index a1123ab9553..b4f7e42cafd 100644 --- a/homeassistant/components/tesla_fleet/number.py +++ b/homeassistant/components/tesla_fleet/number.py @@ -7,8 +7,8 @@ from dataclasses import dataclass from itertools import chain from typing import Any -from tesla_fleet_api import EnergySpecific, VehicleSpecific from tesla_fleet_api.const import Scope +from tesla_fleet_api.tesla import EnergySite, VehicleFleet from homeassistant.components.number import ( NumberDeviceClass, @@ -33,7 +33,7 @@ PARALLEL_UPDATES = 0 class TeslaFleetNumberVehicleEntityDescription(NumberEntityDescription): """Describes TeslaFleet Number entity.""" - func: Callable[[VehicleSpecific, float], Awaitable[Any]] + func: Callable[[VehicleFleet, float], Awaitable[Any]] native_min_value: float native_max_value: float min_key: str | None = None @@ -74,7 +74,7 @@ VEHICLE_DESCRIPTIONS: tuple[TeslaFleetNumberVehicleEntityDescription, ...] 
= ( class TeslaFleetNumberBatteryEntityDescription(NumberEntityDescription): """Describes TeslaFleet Number entity.""" - func: Callable[[EnergySpecific, float], Awaitable[Any]] + func: Callable[[EnergySite, float], Awaitable[Any]] requires: str | None = None diff --git a/homeassistant/components/teslemetry/__init__.py b/homeassistant/components/teslemetry/__init__.py index eef974cc5a7..b820d2d1b43 100644 --- a/homeassistant/components/teslemetry/__init__.py +++ b/homeassistant/components/teslemetry/__init__.py @@ -4,7 +4,6 @@ import asyncio from collections.abc import Callable from typing import Final -from tesla_fleet_api import EnergySpecific, Teslemetry, VehicleSpecific from tesla_fleet_api.const import Scope from tesla_fleet_api.exceptions import ( Forbidden, @@ -12,6 +11,7 @@ from tesla_fleet_api.exceptions import ( SubscriptionRequired, TeslaFleetError, ) +from tesla_fleet_api.teslemetry import Teslemetry from teslemetry_stream import TeslemetryStream from homeassistant.config_entries import ConfigEntry @@ -111,7 +111,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) - # Remove the protobuff 'cached_data' that we do not use to save memory product.pop("cached_data", None) vin = product["vin"] - api = VehicleSpecific(teslemetry.vehicle, vin) + api = teslemetry.vehicles.create(vin) coordinator = TeslemetryVehicleDataCoordinator(hass, entry, api, product) device = DeviceInfo( identifiers={(DOMAIN, vin)}, @@ -156,7 +156,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) - ) continue - api = EnergySpecific(teslemetry.energy, site_id) + api = teslemetry.energySites.create(site_id) device = DeviceInfo( identifiers={(DOMAIN, str(site_id))}, manufacturer="Tesla", diff --git a/homeassistant/components/teslemetry/climate.py b/homeassistant/components/teslemetry/climate.py index 86811131ab6..c1c8fcd2f73 100644 --- a/homeassistant/components/teslemetry/climate.py +++ b/homeassistant/components/teslemetry/climate.py @@ -6,9 +6,11 @@ from itertools import chain from typing import Any, cast from tesla_fleet_api.const import CabinOverheatProtectionTemp, Scope +from tesla_fleet_api.teslemetry import Vehicle from homeassistant.components.climate import ( ATTR_HVAC_MODE, + HVAC_MODES, ClimateEntity, ClimateEntityFeature, HVACMode, @@ -22,15 +24,32 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback +from homeassistant.helpers.restore_state import RestoreEntity from . 
import TeslemetryConfigEntry from .const import DOMAIN, TeslemetryClimateSide -from .entity import TeslemetryVehicleEntity +from .entity import ( + TeslemetryRootEntity, + TeslemetryVehicleEntity, + TeslemetryVehicleStreamEntity, +) from .helpers import handle_vehicle_command from .models import TeslemetryVehicleData DEFAULT_MIN_TEMP = 15 DEFAULT_MAX_TEMP = 28 +COP_TEMPERATURES = { + 30: CabinOverheatProtectionTemp.LOW, + 35: CabinOverheatProtectionTemp.MEDIUM, + 40: CabinOverheatProtectionTemp.HIGH, +} +PRESET_MODES = { + "Off": "off", + "On": "keep", + "Dog": "dog", + "Party": "camp", +} + PARALLEL_UPDATES = 0 @@ -45,13 +64,21 @@ async def async_setup_entry( async_add_entities( chain( ( - TeslemetryClimateEntity( + TeslemetryPollingClimateEntity( + vehicle, TeslemetryClimateSide.DRIVER, entry.runtime_data.scopes + ) + if vehicle.api.pre2021 or vehicle.firmware < "2024.44.25" + else TeslemetryStreamingClimateEntity( vehicle, TeslemetryClimateSide.DRIVER, entry.runtime_data.scopes ) for vehicle in entry.runtime_data.vehicles ), ( - TeslemetryCabinOverheatProtectionEntity( + TeslemetryPollingCabinOverheatProtectionEntity( + vehicle, entry.runtime_data.scopes + ) + if vehicle.api.pre2021 or vehicle.firmware < "2024.44.25" + else TeslemetryStreamingCabinOverheatProtectionEntity( vehicle, entry.runtime_data.scopes ) for vehicle in entry.runtime_data.vehicles @@ -60,66 +87,22 @@ async def async_setup_entry( ) -class TeslemetryClimateEntity(TeslemetryVehicleEntity, ClimateEntity): - """Telemetry vehicle climate entity.""" +class TeslemetryClimateEntity(TeslemetryRootEntity, ClimateEntity): + """Vehicle Climate Control.""" + + api: Vehicle _attr_precision = PRECISION_HALVES - _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_hvac_modes = [HVACMode.HEAT_COOL, HVACMode.OFF] - _attr_supported_features = ( - ClimateEntityFeature.TURN_ON - | ClimateEntityFeature.TURN_OFF - | ClimateEntityFeature.TARGET_TEMPERATURE - | ClimateEntityFeature.PRESET_MODE - ) - _attr_preset_modes = ["off", "keep", "dog", "camp"] - - def __init__( - self, - data: TeslemetryVehicleData, - side: TeslemetryClimateSide, - scopes: Scope, - ) -> None: - """Initialize the climate.""" - self.scoped = Scope.VEHICLE_CMDS in scopes - - if not self.scoped: - self._attr_supported_features = ClimateEntityFeature(0) - self._attr_hvac_modes = [] - - super().__init__( - data, - side, - ) - - def _async_update_attrs(self) -> None: - """Update the attributes of the entity.""" - value = self.get("climate_state_is_climate_on") - if value: - self._attr_hvac_mode = HVACMode.HEAT_COOL - else: - self._attr_hvac_mode = HVACMode.OFF - - # If not scoped, prevent the user from changing the HVAC mode by making it the only option - if self._attr_hvac_mode and not self.scoped: - self._attr_hvac_modes = [self._attr_hvac_mode] - - self._attr_current_temperature = self.get("climate_state_inside_temp") - self._attr_target_temperature = self.get(f"climate_state_{self.key}_setting") - self._attr_preset_mode = self.get("climate_state_climate_keeper_mode") - self._attr_min_temp = cast( - float, self.get("climate_state_min_avail_temp", DEFAULT_MIN_TEMP) - ) - self._attr_max_temp = cast( - float, self.get("climate_state_max_avail_temp", DEFAULT_MAX_TEMP) - ) + _attr_preset_modes = list(PRESET_MODES.values()) + _attr_fan_modes = ["off", "bioweapon"] + _enable_turn_on_off_backwards_compatibility = False async def async_turn_on(self) -> None: """Set the climate state to on.""" - self.raise_for_scope(Scope.VEHICLE_CMDS) - await self.wake_up_if_asleep() + await 
handle_vehicle_command(self.api.auto_conditioning_start()) self._attr_hvac_mode = HVACMode.HEAT_COOL @@ -127,19 +110,21 @@ class TeslemetryClimateEntity(TeslemetryVehicleEntity, ClimateEntity): async def async_turn_off(self) -> None: """Set the climate state to off.""" - self.raise_for_scope(Scope.VEHICLE_CMDS) - await self.wake_up_if_asleep() + await handle_vehicle_command(self.api.auto_conditioning_stop()) self._attr_hvac_mode = HVACMode.OFF self._attr_preset_mode = self._attr_preset_modes[0] + self._attr_fan_mode = self._attr_fan_modes[0] self.async_write_ha_state() async def async_set_temperature(self, **kwargs: Any) -> None: """Set the climate temperature.""" + if temp := kwargs.get(ATTR_TEMPERATURE): - await self.wake_up_if_asleep() + self.raise_for_scope(Scope.VEHICLE_CMDS) + await handle_vehicle_command( self.api.set_temps( driver_temp=temp, @@ -163,18 +148,210 @@ class TeslemetryClimateEntity(TeslemetryVehicleEntity, ClimateEntity): async def async_set_preset_mode(self, preset_mode: str) -> None: """Set the climate preset mode.""" - await self.wake_up_if_asleep() + self.raise_for_scope(Scope.VEHICLE_CMDS) + await handle_vehicle_command( self.api.set_climate_keeper_mode( climate_keeper_mode=self._attr_preset_modes.index(preset_mode) ) ) self._attr_preset_mode = preset_mode - if preset_mode != self._attr_preset_modes[0]: - # Changing preset mode will also turn on climate + if preset_mode == self._attr_preset_modes[0]: + self._attr_hvac_mode = HVACMode.OFF + else: self._attr_hvac_mode = HVACMode.HEAT_COOL self.async_write_ha_state() + async def async_set_fan_mode(self, fan_mode: str) -> None: + """Set the Bioweapon defense mode.""" + self.raise_for_scope(Scope.VEHICLE_CMDS) + + await handle_vehicle_command( + self.api.set_bioweapon_mode( + on=(fan_mode != "off"), + manual_override=True, + ) + ) + self._attr_fan_mode = fan_mode + if fan_mode == self._attr_fan_modes[1]: + self._attr_hvac_mode = HVACMode.HEAT_COOL + self.async_write_ha_state() + + +class TeslemetryPollingClimateEntity(TeslemetryClimateEntity, TeslemetryVehicleEntity): + """Polling vehicle climate entity.""" + + _attr_supported_features = ( + ClimateEntityFeature.TURN_ON + | ClimateEntityFeature.TURN_OFF + | ClimateEntityFeature.TARGET_TEMPERATURE + | ClimateEntityFeature.PRESET_MODE + | ClimateEntityFeature.FAN_MODE + ) + + def __init__( + self, + data: TeslemetryVehicleData, + side: TeslemetryClimateSide, + scopes: list[Scope], + ) -> None: + """Initialize the climate.""" + self.scoped = Scope.VEHICLE_CMDS in scopes + if not self.scoped: + self._attr_supported_features = ClimateEntityFeature(0) + + super().__init__(data, side) + + def _async_update_attrs(self) -> None: + """Update the attributes of the entity.""" + value = self.get("climate_state_is_climate_on") + if value is None: + self._attr_hvac_mode = None + if value: + self._attr_hvac_mode = HVACMode.HEAT_COOL + else: + self._attr_hvac_mode = HVACMode.OFF + + self._attr_current_temperature = self.get("climate_state_inside_temp") + self._attr_target_temperature = self.get(f"climate_state_{self.key}_setting") + self._attr_preset_mode = self.get("climate_state_climate_keeper_mode") + if self.get("climate_state_bioweapon_mode"): + self._attr_fan_mode = "bioweapon" + else: + self._attr_fan_mode = "off" + self._attr_min_temp = cast( + float, self.get("climate_state_min_avail_temp", DEFAULT_MIN_TEMP) + ) + self._attr_max_temp = cast( + float, self.get("climate_state_max_avail_temp", DEFAULT_MAX_TEMP) + ) + + +class TeslemetryStreamingClimateEntity( + 
TeslemetryClimateEntity, TeslemetryVehicleStreamEntity, RestoreEntity +): + """Teslemetry steering wheel climate control.""" + + _attr_supported_features = ( + ClimateEntityFeature.TURN_ON + | ClimateEntityFeature.TURN_OFF + | ClimateEntityFeature.TARGET_TEMPERATURE + | ClimateEntityFeature.PRESET_MODE + ) + + def __init__( + self, + data: TeslemetryVehicleData, + side: TeslemetryClimateSide, + scopes: list[Scope], + ) -> None: + """Initialize the climate.""" + + # Initialize defaults + self._attr_hvac_mode = None + self._attr_current_temperature = None + self._attr_target_temperature = None + self._attr_fan_mode = None + self._attr_preset_mode = None + + self.scoped = Scope.VEHICLE_CMDS in scopes + if not self.scoped: + self._attr_supported_features = ClimateEntityFeature(0) + self.side = side + super().__init__( + data, + side, + ) + + self._attr_min_temp = cast( + float, + data.coordinator.data.get("climate_state_min_avail_temp", DEFAULT_MIN_TEMP), + ) + self._attr_max_temp = cast( + float, + data.coordinator.data.get("climate_state_max_avail_temp", DEFAULT_MAX_TEMP), + ) + self.rhd: bool = data.coordinator.data.get("vehicle_config_rhd", False) + + async def async_added_to_hass(self) -> None: + """Handle entity which will be added.""" + await super().async_added_to_hass() + if (state := await self.async_get_last_state()) is not None: + self._attr_hvac_mode = ( + HVACMode(state.state) if state.state in HVAC_MODES else None + ) + self._attr_current_temperature = state.attributes.get("current_temperature") + self._attr_target_temperature = state.attributes.get("temperature") + self._attr_preset_mode = state.attributes.get("preset_mode") + + self.async_on_remove( + self.vehicle.stream_vehicle.listen_InsideTemp( + self._async_handle_inside_temp + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_HvacACEnabled( + self._async_handle_hvac_ac_enabled + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_ClimateKeeperMode( + self._async_handle_climate_keeper_mode + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_RightHandDrive(self._async_handle_rhd) + ) + + if self.side == TeslemetryClimateSide.DRIVER: + if self.rhd: + self.async_on_remove( + self.vehicle.stream_vehicle.listen_HvacRightTemperatureRequest( + self._async_handle_hvac_temperature_request + ) + ) + else: + self.async_on_remove( + self.vehicle.stream_vehicle.listen_HvacLeftTemperatureRequest( + self._async_handle_hvac_temperature_request + ) + ) + elif self.side == TeslemetryClimateSide.PASSENGER: + if self.rhd: + self.async_on_remove( + self.vehicle.stream_vehicle.listen_HvacLeftTemperatureRequest( + self._async_handle_hvac_temperature_request + ) + ) + else: + self.async_on_remove( + self.vehicle.stream_vehicle.listen_HvacRightTemperatureRequest( + self._async_handle_hvac_temperature_request + ) + ) + + def _async_handle_inside_temp(self, data: float | None): + self._attr_current_temperature = data + self.async_write_ha_state() + + def _async_handle_hvac_ac_enabled(self, data: bool | None): + self._attr_hvac_mode = ( + None if data is None else HVACMode.HEAT_COOL if data else HVACMode.OFF + ) + self.async_write_ha_state() + + def _async_handle_climate_keeper_mode(self, data: str | None): + self._attr_preset_mode = PRESET_MODES.get(data) if data else None + self.async_write_ha_state() + + def _async_handle_hvac_temperature_request(self, data: float | None): + self._attr_target_temperature = data + self.async_write_ha_state() + + def _async_handle_rhd(self, data: bool | None): + if 
data is not None: + self.rhd = data + COP_MODES = { "Off": HVACMode.OFF, @@ -182,73 +359,27 @@ COP_MODES = { "FanOnly": HVACMode.FAN_ONLY, } -# String to celsius COP_LEVELS = { "Low": 30, "Medium": 35, "High": 40, } -# Celsius to IntEnum -TEMP_LEVELS = { - 30: CabinOverheatProtectionTemp.LOW, - 35: CabinOverheatProtectionTemp.MEDIUM, - 40: CabinOverheatProtectionTemp.HIGH, -} +class TeslemetryCabinOverheatProtectionEntity(TeslemetryRootEntity, ClimateEntity): + """Vehicle Cabin Overheat Protection.""" -class TeslemetryCabinOverheatProtectionEntity(TeslemetryVehicleEntity, ClimateEntity): - """Telemetry vehicle cabin overheat protection entity.""" + api: Vehicle _attr_precision = PRECISION_WHOLE _attr_target_temperature_step = 5 - _attr_min_temp = COP_LEVELS["Low"] - _attr_max_temp = COP_LEVELS["High"] + _attr_min_temp = 30 + _attr_max_temp = 40 _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_hvac_modes = list(COP_MODES.values()) - _attr_entity_registry_enabled_default = False - def __init__( - self, - data: TeslemetryVehicleData, - scopes: Scope, - ) -> None: - """Initialize the climate.""" - - self.scoped = Scope.VEHICLE_CMDS in scopes - if self.scoped: - self._attr_supported_features = ( - ClimateEntityFeature.TURN_ON | ClimateEntityFeature.TURN_OFF - ) - else: - self._attr_supported_features = ClimateEntityFeature(0) - self._attr_hvac_modes = [] - - super().__init__(data, "climate_state_cabin_overheat_protection") - - # Supported Features from data - if self.scoped and self.get("vehicle_config_cop_user_set_temp_supported"): - self._attr_supported_features |= ClimateEntityFeature.TARGET_TEMPERATURE - - def _async_update_attrs(self) -> None: - """Update the attributes of the entity.""" - - if (state := self.get("climate_state_cabin_overheat_protection")) is None: - self._attr_hvac_mode = None - else: - self._attr_hvac_mode = COP_MODES.get(state) - - # If not scoped, prevent the user from changing the HVAC mode by making it the only option - if self._attr_hvac_mode and not self.scoped: - self._attr_hvac_modes = [self._attr_hvac_mode] - - if (level := self.get("climate_state_cop_activation_temperature")) is None: - self._attr_target_temperature = None - else: - self._attr_target_temperature = COP_LEVELS.get(level) - - self._attr_current_temperature = self.get("climate_state_inside_temp") + _enable_turn_on_off_backwards_compatibility = False async def async_turn_on(self) -> None: """Set the climate state to on.""" @@ -260,26 +391,28 @@ class TeslemetryCabinOverheatProtectionEntity(TeslemetryVehicleEntity, ClimateEn async def async_set_temperature(self, **kwargs: Any) -> None: """Set the climate temperature.""" - self.raise_for_scope(Scope.VEHICLE_CMDS) - if (temp := kwargs.get(ATTR_TEMPERATURE)) is None or ( - cop_mode := TEMP_LEVELS.get(temp) - ) is None: - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="invalid_cop_temp", - ) + if temp := kwargs.get(ATTR_TEMPERATURE): + if (cop_mode := COP_TEMPERATURES.get(temp)) is None: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_cop_temp", + ) + self.raise_for_scope(Scope.VEHICLE_CMDS) - await self.wake_up_if_asleep() - await handle_vehicle_command(self.api.set_cop_temp(cop_mode)) - self._attr_target_temperature = temp + await handle_vehicle_command(self.api.set_cop_temp(cop_mode)) + self._attr_target_temperature = temp if mode := kwargs.get(ATTR_HVAC_MODE): - await self._async_set_cop(mode) + # Set HVAC mode will call write_ha_state + await self.async_set_hvac_mode(mode) 
+ else: + self.async_write_ha_state() - self.async_write_ha_state() + async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: + """Set the climate mode and state.""" + self.raise_for_scope(Scope.VEHICLE_CMDS) - async def _async_set_cop(self, hvac_mode: HVACMode) -> None: if hvac_mode == HVACMode.OFF: await handle_vehicle_command( self.api.set_cabin_overheat_protection(on=False, fan_only=False) @@ -294,10 +427,125 @@ class TeslemetryCabinOverheatProtectionEntity(TeslemetryVehicleEntity, ClimateEn ) self._attr_hvac_mode = hvac_mode - - async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: - """Set the climate mode and state.""" - self.raise_for_scope(Scope.VEHICLE_CMDS) - await self.wake_up_if_asleep() - await self._async_set_cop(hvac_mode) + self.async_write_ha_state() + + +class TeslemetryPollingCabinOverheatProtectionEntity( + TeslemetryVehicleEntity, TeslemetryCabinOverheatProtectionEntity +): + """Vehicle Cabin Overheat Protection.""" + + def __init__( + self, + data: TeslemetryVehicleData, + scopes: list[Scope], + ) -> None: + """Initialize the climate.""" + + super().__init__( + data, + "climate_state_cabin_overheat_protection", + ) + + # Supported Features + self._attr_supported_features = ( + ClimateEntityFeature.TURN_ON | ClimateEntityFeature.TURN_OFF + ) + if self.get("vehicle_config_cop_user_set_temp_supported"): + self._attr_supported_features |= ClimateEntityFeature.TARGET_TEMPERATURE + + # Scopes + self.scoped = Scope.VEHICLE_CMDS in scopes + if not self.scoped: + self._attr_supported_features = ClimateEntityFeature(0) + + def _async_update_attrs(self) -> None: + """Update the attributes of the entity.""" + + if (state := self.get("climate_state_cabin_overheat_protection")) is None: + self._attr_hvac_mode = None + else: + self._attr_hvac_mode = COP_MODES.get(state) + + if (level := self.get("climate_state_cop_activation_temperature")) is None: + self._attr_target_temperature = None + else: + self._attr_target_temperature = COP_LEVELS.get(level) + + self._attr_current_temperature = self.get("climate_state_inside_temp") + + +class TeslemetryStreamingCabinOverheatProtectionEntity( + TeslemetryVehicleStreamEntity, + TeslemetryCabinOverheatProtectionEntity, + RestoreEntity, +): + """Vehicle Cabin Overheat Protection.""" + + def __init__( + self, + data: TeslemetryVehicleData, + scopes: list[Scope], + ) -> None: + """Initialize the climate.""" + + # Initialize defaults + self._attr_hvac_mode = None + self._attr_current_temperature = None + self._attr_target_temperature = None + self._attr_fan_mode = None + self._attr_preset_mode = None + + super().__init__(data, "climate_state_cabin_overheat_protection") + + # Supported Features + self._attr_supported_features = ( + ClimateEntityFeature.TURN_ON | ClimateEntityFeature.TURN_OFF + ) + if data.coordinator.data.get("vehicle_config_cop_user_set_temp_supported"): + self._attr_supported_features |= ClimateEntityFeature.TARGET_TEMPERATURE + + # Scopes + self.scoped = Scope.VEHICLE_CMDS in scopes + if not self.scoped: + self._attr_supported_features = ClimateEntityFeature(0) + + async def async_added_to_hass(self) -> None: + """Handle entity which will be added.""" + await super().async_added_to_hass() + if (state := await self.async_get_last_state()) is not None: + self._attr_hvac_mode = ( + HVACMode(state.state) if state.state in HVAC_MODES else None + ) + self._attr_current_temperature = state.attributes.get("temperature") + self._attr_target_temperature = state.attributes.get("target_temperature") + + 
self.async_on_remove( + self.vehicle.stream_vehicle.listen_InsideTemp( + self._async_handle_inside_temp + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_CabinOverheatProtectionMode( + self._async_handle_protection_mode + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_CabinOverheatProtectionTemperatureLimit( + self._async_handle_temperature_limit + ) + ) + + def _async_handle_inside_temp(self, value: float | None): + self._attr_current_temperature = value + self.async_write_ha_state() + + def _async_handle_protection_mode(self, value: str | None): + self._attr_hvac_mode = COP_MODES.get(value) if value is not None else None + self.async_write_ha_state() + + def _async_handle_temperature_limit(self, value: str | None): + self._attr_target_temperature = ( + COP_LEVELS.get(value) if value is not None else None + ) self.async_write_ha_state() diff --git a/homeassistant/components/teslemetry/config_flow.py b/homeassistant/components/teslemetry/config_flow.py index d8cf2bd7945..a25a98d6c68 100644 --- a/homeassistant/components/teslemetry/config_flow.py +++ b/homeassistant/components/teslemetry/config_flow.py @@ -6,12 +6,12 @@ from collections.abc import Mapping from typing import Any from aiohttp import ClientConnectionError -from tesla_fleet_api import Teslemetry from tesla_fleet_api.exceptions import ( InvalidToken, SubscriptionRequired, TeslaFleetError, ) +from tesla_fleet_api.teslemetry import Teslemetry import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult diff --git a/homeassistant/components/teslemetry/coordinator.py b/homeassistant/components/teslemetry/coordinator.py index 0cd2a5a62d6..07549008a6c 100644 --- a/homeassistant/components/teslemetry/coordinator.py +++ b/homeassistant/components/teslemetry/coordinator.py @@ -5,13 +5,13 @@ from __future__ import annotations from datetime import datetime, timedelta from typing import TYPE_CHECKING, Any -from tesla_fleet_api import EnergySpecific, VehicleSpecific from tesla_fleet_api.const import TeslaEnergyPeriod, VehicleDataEndpoint from tesla_fleet_api.exceptions import ( InvalidToken, SubscriptionRequired, TeslaFleetError, ) +from tesla_fleet_api.teslemetry import EnergySite, Vehicle from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed @@ -49,7 +49,7 @@ class TeslemetryVehicleDataCoordinator(DataUpdateCoordinator[dict[str, Any]]): self, hass: HomeAssistant, config_entry: TeslemetryConfigEntry, - api: VehicleSpecific, + api: Vehicle, product: dict, ) -> None: """Initialize Teslemetry Vehicle Update Coordinator.""" @@ -87,7 +87,7 @@ class TeslemetryEnergySiteLiveCoordinator(DataUpdateCoordinator[dict[str, Any]]) self, hass: HomeAssistant, config_entry: TeslemetryConfigEntry, - api: EnergySpecific, + api: EnergySite, data: dict, ) -> None: """Initialize Teslemetry Energy Site Live coordinator.""" @@ -133,7 +133,7 @@ class TeslemetryEnergySiteInfoCoordinator(DataUpdateCoordinator[dict[str, Any]]) self, hass: HomeAssistant, config_entry: TeslemetryConfigEntry, - api: EnergySpecific, + api: EnergySite, product: dict, ) -> None: """Initialize Teslemetry Energy Info coordinator.""" @@ -169,7 +169,7 @@ class TeslemetryEnergyHistoryCoordinator(DataUpdateCoordinator[dict[str, Any]]): self, hass: HomeAssistant, config_entry: TeslemetryConfigEntry, - api: EnergySpecific, + api: EnergySite, ) -> None: """Initialize Teslemetry Energy Info coordinator.""" super().__init__( @@ -192,7 +192,7 @@ class 
TeslemetryEnergyHistoryCoordinator(DataUpdateCoordinator[dict[str, Any]]): raise UpdateFailed(e.message) from e # Add all time periods together - output = {key: 0 for key in ENERGY_HISTORY_FIELDS} + output = dict.fromkeys(ENERGY_HISTORY_FIELDS, 0) for period in data.get("time_series", []): for key in ENERGY_HISTORY_FIELDS: output[key] += period.get(key, 0) diff --git a/homeassistant/components/teslemetry/entity.py b/homeassistant/components/teslemetry/entity.py index 82d3db123c3..3d145d24b0c 100644 --- a/homeassistant/components/teslemetry/entity.py +++ b/homeassistant/components/teslemetry/entity.py @@ -4,8 +4,8 @@ from abc import abstractmethod from typing import Any from propcache.api import cached_property -from tesla_fleet_api import EnergySpecific, VehicleSpecific from tesla_fleet_api.const import Scope +from tesla_fleet_api.teslemetry import EnergySite, Vehicle from teslemetry_stream import Signal from homeassistant.exceptions import ServiceValidationError @@ -29,7 +29,7 @@ class TeslemetryRootEntity(Entity): _attr_has_entity_name = True scoped: bool - api: VehicleSpecific | EnergySpecific + api: Vehicle | EnergySite def raise_for_scope(self, scope: Scope): """Raise an error if a scope is not available.""" @@ -105,7 +105,7 @@ class TeslemetryVehicleEntity(TeslemetryEntity): """Parent class for Teslemetry Vehicle entities.""" _last_update: int = 0 - api: VehicleSpecific + api: Vehicle vehicle: TeslemetryVehicleData def __init__( @@ -134,7 +134,7 @@ class TeslemetryVehicleEntity(TeslemetryEntity): class TeslemetryEnergyLiveEntity(TeslemetryEntity): """Parent class for Teslemetry Energy Site Live entities.""" - api: EnergySpecific + api: EnergySite def __init__( self, @@ -155,7 +155,7 @@ class TeslemetryEnergyLiveEntity(TeslemetryEntity): class TeslemetryEnergyInfoEntity(TeslemetryEntity): """Parent class for Teslemetry Energy Site Info Entities.""" - api: EnergySpecific + api: EnergySite def __init__( self, @@ -194,7 +194,7 @@ class TeslemetryWallConnectorEntity(TeslemetryEntity): """Parent class for Teslemetry Wall Connector Entities.""" _attr_has_entity_name = True - api: EnergySpecific + api: EnergySite def __init__( self, diff --git a/homeassistant/components/teslemetry/manifest.json b/homeassistant/components/teslemetry/manifest.json index 4e9228acd2f..cae5a8f3c01 100644 --- a/homeassistant/components/teslemetry/manifest.json +++ b/homeassistant/components/teslemetry/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/teslemetry", "iot_class": "cloud_polling", "loggers": ["tesla-fleet-api"], - "requirements": ["tesla-fleet-api==0.9.12", "teslemetry-stream==0.6.10"] + "requirements": ["tesla-fleet-api==1.0.16", "teslemetry-stream==0.6.12"] } diff --git a/homeassistant/components/teslemetry/media_player.py b/homeassistant/components/teslemetry/media_player.py index 1bfc9bf66dc..50f15618e66 100644 --- a/homeassistant/components/teslemetry/media_player.py +++ b/homeassistant/components/teslemetry/media_player.py @@ -3,6 +3,7 @@ from __future__ import annotations from tesla_fleet_api.const import Scope +from tesla_fleet_api.teslemetry import Vehicle from homeassistant.components.media_player import ( MediaPlayerDeviceClass, @@ -12,9 +13,14 @@ from homeassistant.components.media_player import ( ) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback +from homeassistant.helpers.restore_state import RestoreEntity from . 
import TeslemetryConfigEntry -from .entity import TeslemetryVehicleEntity +from .entity import ( + TeslemetryRootEntity, + TeslemetryVehicleEntity, + TeslemetryVehicleStreamEntity, +) from .helpers import handle_vehicle_command from .models import TeslemetryVehicleData @@ -24,8 +30,16 @@ STATES = { "Stopped": MediaPlayerState.IDLE, "Off": MediaPlayerState.OFF, } -VOLUME_MAX = 11.0 -VOLUME_STEP = 1.0 / 3 +DISPLAY_STATES = { + "On": MediaPlayerState.IDLE, + "Accessory": MediaPlayerState.IDLE, + "Charging": MediaPlayerState.OFF, + "Sentry": MediaPlayerState.OFF, + "Off": MediaPlayerState.OFF, +} +# Tesla uses 31 steps, in 0.333 increments up to 10.333 +VOLUME_STEP = 1 / 31 +VOLUME_FACTOR = 31 / 3 # 10.333 PARALLEL_UPDATES = 0 @@ -38,68 +52,99 @@ async def async_setup_entry( """Set up the Teslemetry Media platform from a config entry.""" async_add_entities( - TeslemetryMediaEntity(vehicle, Scope.VEHICLE_CMDS in entry.runtime_data.scopes) + TeslemetryPollingMediaEntity(vehicle, entry.runtime_data.scopes) + if vehicle.api.pre2021 or vehicle.firmware < "2025.2.6" + else TeslemetryStreamingMediaEntity(vehicle, entry.runtime_data.scopes) for vehicle in entry.runtime_data.vehicles ) -class TeslemetryMediaEntity(TeslemetryVehicleEntity, MediaPlayerEntity): - """Vehicle media player class.""" +class TeslemetryMediaEntity(TeslemetryRootEntity, MediaPlayerEntity): + """Base vehicle media player class.""" + + api: Vehicle _attr_device_class = MediaPlayerDeviceClass.SPEAKER - _attr_supported_features = ( - MediaPlayerEntityFeature.NEXT_TRACK - | MediaPlayerEntityFeature.PAUSE - | MediaPlayerEntityFeature.PLAY - | MediaPlayerEntityFeature.PREVIOUS_TRACK - | MediaPlayerEntityFeature.VOLUME_SET - ) - _volume_max: float = VOLUME_MAX + _attr_volume_step = VOLUME_STEP + + async def async_set_volume_level(self, volume: float) -> None: + """Set volume level, range 0..1.""" + self.raise_for_scope(Scope.VEHICLE_CMDS) + + await handle_vehicle_command(self.api.adjust_volume(volume * VOLUME_FACTOR)) + self._attr_volume_level = volume + self.async_write_ha_state() + + async def async_media_play(self) -> None: + """Send play command.""" + if self.state != MediaPlayerState.PLAYING: + self.raise_for_scope(Scope.VEHICLE_CMDS) + + await handle_vehicle_command(self.api.media_toggle_playback()) + self._attr_state = MediaPlayerState.PLAYING + self.async_write_ha_state() + + async def async_media_pause(self) -> None: + """Send pause command.""" + + if self.state == MediaPlayerState.PLAYING: + self.raise_for_scope(Scope.VEHICLE_CMDS) + + await handle_vehicle_command(self.api.media_toggle_playback()) + self._attr_state = MediaPlayerState.PAUSED + self.async_write_ha_state() + + async def async_media_next_track(self) -> None: + """Send next track command.""" + + self.raise_for_scope(Scope.VEHICLE_CMDS) + await handle_vehicle_command(self.api.media_next_track()) + + async def async_media_previous_track(self) -> None: + """Send previous track command.""" + + self.raise_for_scope(Scope.VEHICLE_CMDS) + await handle_vehicle_command(self.api.media_prev_track()) + + +class TeslemetryPollingMediaEntity(TeslemetryVehicleEntity, TeslemetryMediaEntity): + """Polling vehicle media player class.""" def __init__( self, data: TeslemetryVehicleData, - scoped: bool, + scopes: list[Scope], ) -> None: """Initialize the media player entity.""" super().__init__(data, "media") - self.scoped = scoped - if not scoped: + + self._attr_supported_features = ( + MediaPlayerEntityFeature.NEXT_TRACK + | MediaPlayerEntityFeature.PAUSE + | 
MediaPlayerEntityFeature.PLAY + | MediaPlayerEntityFeature.PREVIOUS_TRACK + | MediaPlayerEntityFeature.VOLUME_SET + ) + self.scoped = Scope.VEHICLE_CMDS in scopes + if not self.scoped: self._attr_supported_features = MediaPlayerEntityFeature(0) def _async_update_attrs(self) -> None: """Update entity attributes.""" - self._volume_max = ( - self.get("vehicle_state_media_info_audio_volume_max") or VOLUME_MAX - ) - self._attr_state = STATES.get( - self.get("vehicle_state_media_info_media_playback_status") or "Off", - ) - self._attr_volume_step = ( - 1.0 - / self._volume_max - / ( - self.get("vehicle_state_media_info_audio_volume_increment") - or VOLUME_STEP - ) - ) + state = self.get("vehicle_state_media_info_media_playback_status") + self._attr_state = STATES.get(state) if state else None + self._attr_volume_level = ( + self.get("vehicle_state_media_info_audio_volume") or 0 + ) / VOLUME_FACTOR - if volume := self.get("vehicle_state_media_info_audio_volume"): - self._attr_volume_level = volume / self._volume_max - else: - self._attr_volume_level = None + duration = self.get("vehicle_state_media_info_now_playing_duration") + self._attr_media_duration = duration / 1000 if duration is not None else None - if duration := self.get("vehicle_state_media_info_now_playing_duration"): - self._attr_media_duration = duration / 1000 - else: - self._attr_media_duration = None - - if duration and ( - position := self.get("vehicle_state_media_info_now_playing_elapsed") - ): - self._attr_media_position = position / 1000 - else: - self._attr_media_position = None + # Return media position only when a media duration is > 0. + elapsed = self.get("vehicle_state_media_info_now_playing_elapsed") + self._attr_media_position = ( + elapsed / 1000 if duration and elapsed is not None else None + ) self._attr_media_title = self.get("vehicle_state_media_info_now_playing_title") self._attr_media_artist = self.get( @@ -113,42 +158,151 @@ class TeslemetryMediaEntity(TeslemetryVehicleEntity, MediaPlayerEntity): ) self._attr_source = self.get("vehicle_state_media_info_now_playing_source") - async def async_set_volume_level(self, volume: float) -> None: - """Set volume level, range 0..1.""" - self.raise_for_scope(Scope.VEHICLE_CMDS) - await self.wake_up_if_asleep() - await handle_vehicle_command( - self.api.adjust_volume(int(volume * self._volume_max)) + +class TeslemetryStreamingMediaEntity( + TeslemetryVehicleStreamEntity, TeslemetryMediaEntity, RestoreEntity +): + """Streaming vehicle media player class.""" + + def __init__( + self, + data: TeslemetryVehicleData, + scopes: list[Scope], + ) -> None: + """Initialize the media player entity.""" + super().__init__(data, "media") + + self._attr_supported_features = ( + MediaPlayerEntityFeature.NEXT_TRACK + | MediaPlayerEntityFeature.PAUSE + | MediaPlayerEntityFeature.PLAY + | MediaPlayerEntityFeature.PREVIOUS_TRACK + | MediaPlayerEntityFeature.VOLUME_SET ) - self._attr_volume_level = volume + self.scoped = Scope.VEHICLE_CMDS in scopes + if not self.scoped: + self._attr_supported_features = MediaPlayerEntityFeature(0) + + async def async_added_to_hass(self) -> None: + """Call when entity is added to hass.""" + + await super().async_added_to_hass() + if (state := await self.async_get_last_state()) is not None: + try: + self._attr_state = MediaPlayerState(state.state) + except ValueError: + self._attr_state = None + self._attr_volume_level = state.attributes.get("volume_level") + self._attr_media_title = state.attributes.get("media_title") + self._attr_media_artist = 
state.attributes.get("media_artist") + self._attr_media_album_name = state.attributes.get("media_album_name") + self._attr_media_playlist = state.attributes.get("media_playlist") + self._attr_media_duration = state.attributes.get("media_duration") + self._attr_media_position = state.attributes.get("media_position") + self._attr_source = state.attributes.get("source") + + self.async_write_ha_state() + + self.async_on_remove( + self.vehicle.stream_vehicle.listen_CenterDisplay( + self._async_handle_center_display + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_MediaPlaybackStatus( + self._async_handle_media_playback_status + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_MediaPlaybackSource( + self._async_handle_media_playback_source + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_MediaAudioVolume( + self._async_handle_media_audio_volume + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_MediaNowPlayingDuration( + self._async_handle_media_now_playing_duration + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_MediaNowPlayingElapsed( + self._async_handle_media_now_playing_elapsed + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_MediaNowPlayingArtist( + self._async_handle_media_now_playing_artist + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_MediaNowPlayingAlbum( + self._async_handle_media_now_playing_album + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_MediaNowPlayingTitle( + self._async_handle_media_now_playing_title + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_MediaNowPlayingStation( + self._async_handle_media_now_playing_station + ) + ) + + def _async_handle_center_display(self, value: str | None) -> None: + """Update entity attributes.""" + if value is not None: + self._attr_state = DISPLAY_STATES.get(value) + self.async_write_ha_state() + + def _async_handle_media_playback_status(self, value: str | None) -> None: + """Update entity attributes.""" + self._attr_state = MediaPlayerState.OFF if value is None else STATES.get(value) self.async_write_ha_state() - async def async_media_play(self) -> None: - """Send play command.""" - if self.state != MediaPlayerState.PLAYING: - self.raise_for_scope(Scope.VEHICLE_CMDS) - await self.wake_up_if_asleep() - await handle_vehicle_command(self.api.media_toggle_playback()) - self._attr_state = MediaPlayerState.PLAYING - self.async_write_ha_state() + def _async_handle_media_playback_source(self, value: str | None) -> None: + """Update entity attributes.""" + self._attr_source = value + self.async_write_ha_state() - async def async_media_pause(self) -> None: - """Send pause command.""" - if self.state == MediaPlayerState.PLAYING: - self.raise_for_scope(Scope.VEHICLE_CMDS) - await self.wake_up_if_asleep() - await handle_vehicle_command(self.api.media_toggle_playback()) - self._attr_state = MediaPlayerState.PAUSED - self.async_write_ha_state() + def _async_handle_media_audio_volume(self, value: float | None) -> None: + """Update entity attributes.""" + self._attr_volume_level = None if value is None else value / VOLUME_FACTOR + self.async_write_ha_state() - async def async_media_next_track(self) -> None: - """Send next track command.""" - self.raise_for_scope(Scope.VEHICLE_CMDS) - await self.wake_up_if_asleep() - await handle_vehicle_command(self.api.media_next_track()) + def _async_handle_media_now_playing_duration(self, value: int | None) -> None: + """Update entity 
attributes.""" + self._attr_media_duration = None if value is None else int(value / 1000) + self.async_write_ha_state() - async def async_media_previous_track(self) -> None: - """Send previous track command.""" - self.raise_for_scope(Scope.VEHICLE_CMDS) - await self.wake_up_if_asleep() - await handle_vehicle_command(self.api.media_prev_track()) + def _async_handle_media_now_playing_elapsed(self, value: int | None) -> None: + """Update entity attributes.""" + self._attr_media_position = None if value is None else int(value / 1000) + self.async_write_ha_state() + + def _async_handle_media_now_playing_artist(self, value: str | None) -> None: + """Update entity attributes.""" + self._attr_media_artist = value # Check if this is album artist or not + self.async_write_ha_state() + + def _async_handle_media_now_playing_album(self, value: str | None) -> None: + """Update entity attributes.""" + self._attr_media_album_name = value + self.async_write_ha_state() + + def _async_handle_media_now_playing_title(self, value: str | None) -> None: + """Update entity attributes.""" + self._attr_media_title = value + self.async_write_ha_state() + + def _async_handle_media_now_playing_station(self, value: str | None) -> None: + """Update entity attributes.""" + self._attr_media_channel = ( + value # could also be _attr_media_playlist when Spotify + ) + self.async_write_ha_state() diff --git a/homeassistant/components/teslemetry/models.py b/homeassistant/components/teslemetry/models.py index 5b78386c68a..fd6cf12b5b9 100644 --- a/homeassistant/components/teslemetry/models.py +++ b/homeassistant/components/teslemetry/models.py @@ -6,8 +6,8 @@ import asyncio from collections.abc import Callable from dataclasses import dataclass -from tesla_fleet_api import EnergySpecific, VehicleSpecific from tesla_fleet_api.const import Scope +from tesla_fleet_api.teslemetry import EnergySite, Vehicle from teslemetry_stream import TeslemetryStream, TeslemetryStreamVehicle from homeassistant.config_entries import ConfigEntry @@ -34,7 +34,7 @@ class TeslemetryData: class TeslemetryVehicleData: """Data for a vehicle in the Teslemetry integration.""" - api: VehicleSpecific + api: Vehicle config_entry: ConfigEntry coordinator: TeslemetryVehicleDataCoordinator stream: TeslemetryStream @@ -50,7 +50,7 @@ class TeslemetryVehicleData: class TeslemetryEnergyData: """Data for a vehicle in the Teslemetry integration.""" - api: EnergySpecific + api: EnergySite live_coordinator: TeslemetryEnergySiteLiveCoordinator | None info_coordinator: TeslemetryEnergySiteInfoCoordinator history_coordinator: TeslemetryEnergyHistoryCoordinator | None diff --git a/homeassistant/components/teslemetry/number.py b/homeassistant/components/teslemetry/number.py index 10c15a68b09..ff25dec59b8 100644 --- a/homeassistant/components/teslemetry/number.py +++ b/homeassistant/components/teslemetry/number.py @@ -7,8 +7,8 @@ from dataclasses import dataclass from itertools import chain from typing import Any -from tesla_fleet_api import EnergySpecific, VehicleSpecific from tesla_fleet_api.const import Scope +from tesla_fleet_api.teslemetry import EnergySite, Vehicle from teslemetry_stream import TeslemetryStreamVehicle from homeassistant.components.number import ( @@ -46,7 +46,7 @@ PARALLEL_UPDATES = 0 class TeslemetryNumberVehicleEntityDescription(NumberEntityDescription): """Describes Teslemetry Number entity.""" - func: Callable[[VehicleSpecific, int], Awaitable[Any]] + func: Callable[[Vehicle, int], Awaitable[Any]] min_key: str | None = None max_key: str 
native_min_value: float @@ -99,7 +99,7 @@ VEHICLE_DESCRIPTIONS: tuple[TeslemetryNumberVehicleEntityDescription, ...] = ( class TeslemetryNumberBatteryEntityDescription(NumberEntityDescription): """Describes Teslemetry Number entity.""" - func: Callable[[EnergySpecific, float], Awaitable[Any]] + func: Callable[[EnergySite, float], Awaitable[Any]] requires: str | None = None scopes: list[Scope] diff --git a/homeassistant/components/teslemetry/select.py b/homeassistant/components/teslemetry/select.py index 0d268e302de..9e13d15edc4 100644 --- a/homeassistant/components/teslemetry/select.py +++ b/homeassistant/components/teslemetry/select.py @@ -7,8 +7,8 @@ from dataclasses import dataclass from itertools import chain from typing import Any -from tesla_fleet_api import VehicleSpecific from tesla_fleet_api.const import EnergyExportMode, EnergyOperationMode, Scope, Seat +from tesla_fleet_api.teslemetry import Vehicle from teslemetry_stream import TeslemetryStreamVehicle from homeassistant.components.select import SelectEntity, SelectEntityDescription @@ -40,7 +40,7 @@ LEVEL = {OFF: 0, LOW: 1, MEDIUM: 2, HIGH: 3} class TeslemetrySelectEntityDescription(SelectEntityDescription): """Seat Heater entity description.""" - select_fn: Callable[[VehicleSpecific, int], Awaitable[Any]] + select_fn: Callable[[Vehicle, int], Awaitable[Any]] supported_fn: Callable[[dict], bool] = lambda _: True streaming_listener: ( Callable[ diff --git a/homeassistant/components/teslemetry/sensor.py b/homeassistant/components/teslemetry/sensor.py index f1859ad39de..b1c6b487bf9 100644 --- a/homeassistant/components/teslemetry/sensor.py +++ b/homeassistant/components/teslemetry/sensor.py @@ -7,7 +7,7 @@ from dataclasses import dataclass from datetime import datetime, timedelta from propcache.api import cached_property -from teslemetry_stream import Signal +from teslemetry_stream import Signal, TeslemetryStreamVehicle from teslemetry_stream.const import ShiftState from homeassistant.components.sensor import ( @@ -50,6 +50,7 @@ from .models import TeslemetryEnergyData, TeslemetryVehicleData PARALLEL_UPDATES = 0 + CHARGE_STATES = { "Starting": "starting", "Charging": "charging", @@ -350,21 +351,26 @@ class TeslemetryTimeEntityDescription(SensorEntityDescription): """Describes Teslemetry Sensor entity.""" variance: int - streaming_key: Signal + streaming_listener: Callable[ + [TeslemetryStreamVehicle, Callable[[float | None], None]], + Callable[[], None], + ] streaming_firmware: str = "2024.26" + streaming_value_fn: Callable[[float], float] = lambda x: x VEHICLE_TIME_DESCRIPTIONS: tuple[TeslemetryTimeEntityDescription, ...] 
= ( TeslemetryTimeEntityDescription( key="charge_state_minutes_to_full_charge", - streaming_key=Signal.TIME_TO_FULL_CHARGE, + streaming_value_fn=lambda x: x * 60, + streaming_listener=lambda x, y: x.listen_TimeToFullCharge(y), device_class=SensorDeviceClass.TIMESTAMP, entity_category=EntityCategory.DIAGNOSTIC, variance=4, ), TeslemetryTimeEntityDescription( key="drive_state_active_route_minutes_to_arrival", - streaming_key=Signal.MINUTES_TO_ARRIVAL, + streaming_listener=lambda x, y: x.listen_MinutesToArrival(y), device_class=SensorDeviceClass.TIMESTAMP, variance=1, ), @@ -667,18 +673,22 @@ class TeslemetryStreamTimeSensorEntity(TeslemetryVehicleStreamEntity, SensorEnti """Initialize the sensor.""" self.entity_description = description self._get_timestamp = ignore_variance( - func=lambda value: dt_util.now() + timedelta(minutes=value), + func=lambda value: dt_util.now() + + timedelta(minutes=description.streaming_value_fn(value)), ignored_variance=timedelta(minutes=description.variance), ) - assert description.streaming_key - super().__init__(data, description.key, description.streaming_key) + super().__init__(data, description.key) - @cached_property - def available(self) -> bool: - """Return True if entity is available.""" - return self.stream.connected + async def async_added_to_hass(self) -> None: + """When entity is added to hass.""" + await super().async_added_to_hass() + self.async_on_remove( + self.entity_description.streaming_listener( + self.vehicle.stream_vehicle, self._value_callback + ) + ) - def _async_value_from_stream(self, value) -> None: + def _value_callback(self, value: float | None) -> None: """Update the value of the entity.""" if value is None: self._attr_native_value = None diff --git a/homeassistant/components/teslemetry/strings.json b/homeassistant/components/teslemetry/strings.json index 9dc17fd2ef7..ceb8b3c1af9 100644 --- a/homeassistant/components/teslemetry/strings.json +++ b/homeassistant/components/teslemetry/strings.json @@ -132,7 +132,7 @@ "name": "Tire pressure warning rear right" }, "pin_to_drive_enabled": { - "name": "Pin to drive enabled" + "name": "PIN to Drive enabled" }, "drive_rail": { "name": "Drive rail" @@ -226,6 +226,12 @@ "dog": "Dog mode", "camp": "Camp mode" } + }, + "fan_mode": { + "state": { + "off": "[%key:common::state::off%]", + "bioweapon": "Bioweapon defense" + } } } } diff --git a/homeassistant/components/teslemetry/switch.py b/homeassistant/components/teslemetry/switch.py index 83441e6c4f6..516a6f9852f 100644 --- a/homeassistant/components/teslemetry/switch.py +++ b/homeassistant/components/teslemetry/switch.py @@ -7,7 +7,8 @@ from dataclasses import dataclass from itertools import chain from typing import Any -from tesla_fleet_api.const import Scope, Seat +from tesla_fleet_api.const import Scope +from teslemetry_stream import TeslemetryStreamVehicle from homeassistant.components.switch import ( SwitchDeviceClass, @@ -16,10 +17,16 @@ from homeassistant.components.switch import ( ) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback +from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import StateType from . 
import TeslemetryConfigEntry -from .entity import TeslemetryEnergyInfoEntity, TeslemetryVehicleEntity +from .entity import ( + TeslemetryEnergyInfoEntity, + TeslemetryRootEntity, + TeslemetryVehicleEntity, + TeslemetryVehicleStreamEntity, +) from .helpers import handle_command, handle_vehicle_command from .models import TeslemetryEnergyData, TeslemetryVehicleData @@ -34,36 +41,41 @@ class TeslemetrySwitchEntityDescription(SwitchEntityDescription): off_func: Callable scopes: list[Scope] value_func: Callable[[StateType], bool] = bool + streaming_listener: Callable[ + [TeslemetryStreamVehicle, Callable[[StateType], None]], + Callable[[], None], + ] + streaming_value_fn: Callable[[StateType], bool] = bool + streaming_firmware: str = "2024.26" unique_id: str | None = None VEHICLE_DESCRIPTIONS: tuple[TeslemetrySwitchEntityDescription, ...] = ( TeslemetrySwitchEntityDescription( key="vehicle_state_sentry_mode", + streaming_listener=lambda x, y: x.listen_SentryMode(y), + streaming_value_fn=lambda x: x != "Off", on_func=lambda api: api.set_sentry_mode(on=True), off_func=lambda api: api.set_sentry_mode(on=False), scopes=[Scope.VEHICLE_CMDS], ), TeslemetrySwitchEntityDescription( key="climate_state_auto_seat_climate_left", - on_func=lambda api: api.remote_auto_seat_climate_request(Seat.FRONT_LEFT, True), - off_func=lambda api: api.remote_auto_seat_climate_request( - Seat.FRONT_LEFT, False - ), + streaming_listener=lambda x, y: x.listen_AutoSeatClimateLeft(y), + on_func=lambda api: api.remote_auto_seat_climate_request(1, True), + off_func=lambda api: api.remote_auto_seat_climate_request(1, False), scopes=[Scope.VEHICLE_CMDS], ), TeslemetrySwitchEntityDescription( key="climate_state_auto_seat_climate_right", - on_func=lambda api: api.remote_auto_seat_climate_request( - Seat.FRONT_RIGHT, True - ), - off_func=lambda api: api.remote_auto_seat_climate_request( - Seat.FRONT_RIGHT, False - ), + streaming_listener=lambda x, y: x.listen_AutoSeatClimateRight(y), + on_func=lambda api: api.remote_auto_seat_climate_request(2, True), + off_func=lambda api: api.remote_auto_seat_climate_request(2, False), scopes=[Scope.VEHICLE_CMDS], ), TeslemetrySwitchEntityDescription( key="climate_state_auto_steering_wheel_heat", + streaming_listener=lambda x, y: x.listen_HvacSteeringWheelHeatAuto(y), on_func=lambda api: api.remote_auto_steering_wheel_heat_climate_request( on=True ), @@ -74,6 +86,8 @@ VEHICLE_DESCRIPTIONS: tuple[TeslemetrySwitchEntityDescription, ...] = ( ), TeslemetrySwitchEntityDescription( key="climate_state_defrost_mode", + streaming_listener=lambda x, y: x.listen_DefrostMode(y), + streaming_value_fn=lambda x: x != "Off", on_func=lambda api: api.set_preconditioning_max(on=True, manual_override=False), off_func=lambda api: api.set_preconditioning_max( on=False, manual_override=False @@ -83,9 +97,11 @@ VEHICLE_DESCRIPTIONS: tuple[TeslemetrySwitchEntityDescription, ...] 
= ( TeslemetrySwitchEntityDescription( key="charge_state_charging_state", unique_id="charge_state_user_charge_enable_request", + value_func=lambda state: state in {"Starting", "Charging"}, + streaming_listener=lambda x, y: x.listen_DetailedChargeState(y), + streaming_value_fn=lambda x: x in {"Starting", "Charging"}, on_func=lambda api: api.charge_start(), off_func=lambda api: api.charge_stop(), - value_func=lambda state: state in {"Starting", "Charging"}, scopes=[Scope.VEHICLE_CMDS, Scope.VEHICLE_CHARGING_CMDS], ), ) @@ -101,12 +117,16 @@ async def async_setup_entry( async_add_entities( chain( ( - TeslemetryVehicleSwitchEntity( + TeslemetryPollingVehicleSwitchEntity( + vehicle, description, entry.runtime_data.scopes + ) + if vehicle.api.pre2021 + or vehicle.firmware < description.streaming_firmware + else TeslemetryStreamingVehicleSwitchEntity( vehicle, description, entry.runtime_data.scopes ) for vehicle in entry.runtime_data.vehicles for description in VEHICLE_DESCRIPTIONS - if description.key in vehicle.coordinator.data ), ( TeslemetryChargeFromGridSwitchEntity( @@ -126,15 +146,31 @@ async def async_setup_entry( ) -class TeslemetrySwitchEntity(SwitchEntity): +class TeslemetryVehicleSwitchEntity(TeslemetryRootEntity, SwitchEntity): """Base class for all Teslemetry switch entities.""" _attr_device_class = SwitchDeviceClass.SWITCH entity_description: TeslemetrySwitchEntityDescription + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn on the Switch.""" + self.raise_for_scope(self.entity_description.scopes[0]) + await handle_vehicle_command(self.entity_description.on_func(self.api)) + self._attr_is_on = True + self.async_write_ha_state() -class TeslemetryVehicleSwitchEntity(TeslemetryVehicleEntity, TeslemetrySwitchEntity): - """Base class for Teslemetry vehicle switch entities.""" + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off the Switch.""" + self.raise_for_scope(self.entity_description.scopes[0]) + await handle_vehicle_command(self.entity_description.off_func(self.api)) + self._attr_is_on = False + self.async_write_ha_state() + + +class TeslemetryPollingVehicleSwitchEntity( + TeslemetryVehicleEntity, TeslemetryVehicleSwitchEntity +): + """Base class for Teslemetry polling vehicle switch entities.""" def __init__( self, @@ -151,30 +187,63 @@ class TeslemetryVehicleSwitchEntity(TeslemetryVehicleEntity, TeslemetrySwitchEnt def _async_update_attrs(self) -> None: """Update the attributes of the sensor.""" - self._attr_is_on = self.entity_description.value_func(self._value) - - async def async_turn_on(self, **kwargs: Any) -> None: - """Turn on the Switch.""" - self.raise_for_scope(self.entity_description.scopes[0]) - await self.wake_up_if_asleep() - await handle_vehicle_command(self.entity_description.on_func(self.api)) - self._attr_is_on = True - self.async_write_ha_state() - - async def async_turn_off(self, **kwargs: Any) -> None: - """Turn off the Switch.""" - self.raise_for_scope(self.entity_description.scopes[0]) - await self.wake_up_if_asleep() - await handle_vehicle_command(self.entity_description.off_func(self.api)) - self._attr_is_on = False - self.async_write_ha_state() + self._attr_is_on = ( + None + if self._value is None + else self.entity_description.value_func(self._value) + ) -class TeslemetryChargeFromGridSwitchEntity( - TeslemetryEnergyInfoEntity, TeslemetrySwitchEntity +class TeslemetryStreamingVehicleSwitchEntity( + TeslemetryVehicleStreamEntity, TeslemetryVehicleSwitchEntity, RestoreEntity ): + """Base class for Teslemetry streaming 
vehicle switch entities.""" + + def __init__( + self, + data: TeslemetryVehicleData, + description: TeslemetrySwitchEntityDescription, + scopes: list[Scope], + ) -> None: + """Initialize the Switch.""" + + self.entity_description = description + self.scoped = any(scope in scopes for scope in description.scopes) + super().__init__(data, description.key) + if description.unique_id: + self._attr_unique_id = f"{data.vin}-{description.unique_id}" + + async def async_added_to_hass(self) -> None: + """Handle entity which will be added.""" + await super().async_added_to_hass() + + # Restore previous state + if (state := await self.async_get_last_state()) is not None: + if state.state == "on": + self._attr_is_on = True + elif state.state == "off": + self._attr_is_on = False + + # Add listener + self.async_on_remove( + self.entity_description.streaming_listener( + self.vehicle.stream_vehicle, self._value_callback + ) + ) + + def _value_callback(self, value: StateType) -> None: + """Update the value of the entity.""" + self._attr_is_on = ( + None if value is None else self.entity_description.streaming_value_fn(value) + ) + self.async_write_ha_state() + + +class TeslemetryChargeFromGridSwitchEntity(TeslemetryEnergyInfoEntity, SwitchEntity): """Entity class for Charge From Grid switch.""" + _attr_device_class = SwitchDeviceClass.SWITCH + def __init__( self, data: TeslemetryEnergyData, @@ -215,11 +284,11 @@ class TeslemetryChargeFromGridSwitchEntity( self.async_write_ha_state() -class TeslemetryStormModeSwitchEntity( - TeslemetryEnergyInfoEntity, TeslemetrySwitchEntity -): +class TeslemetryStormModeSwitchEntity(TeslemetryEnergyInfoEntity, SwitchEntity): """Entity class for Storm Mode switch.""" + _attr_device_class = SwitchDeviceClass.SWITCH + def __init__( self, data: TeslemetryEnergyData, diff --git a/homeassistant/components/teslemetry/update.py b/homeassistant/components/teslemetry/update.py index f560f25a8ff..b8d40877de4 100644 --- a/homeassistant/components/teslemetry/update.py +++ b/homeassistant/components/teslemetry/update.py @@ -2,16 +2,22 @@ from __future__ import annotations -from typing import Any, cast +from typing import Any from tesla_fleet_api.const import Scope +from tesla_fleet_api.teslemetry import Vehicle from homeassistant.components.update import UpdateEntity, UpdateEntityFeature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback +from homeassistant.helpers.restore_state import RestoreEntity from . 
import TeslemetryConfigEntry -from .entity import TeslemetryVehicleEntity +from .entity import ( + TeslemetryRootEntity, + TeslemetryVehicleEntity, + TeslemetryVehicleStreamEntity, +) from .helpers import handle_vehicle_command from .models import TeslemetryVehicleData @@ -32,12 +38,31 @@ async def async_setup_entry( """Set up the Teslemetry update platform from a config entry.""" async_add_entities( - TeslemetryUpdateEntity(vehicle, entry.runtime_data.scopes) + TeslemetryPollingUpdateEntity(vehicle, entry.runtime_data.scopes) + if vehicle.api.pre2021 or vehicle.firmware < "2024.44.25" + else TeslemetryStreamingUpdateEntity(vehicle, entry.runtime_data.scopes) for vehicle in entry.runtime_data.vehicles ) -class TeslemetryUpdateEntity(TeslemetryVehicleEntity, UpdateEntity): +class TeslemetryUpdateEntity(TeslemetryRootEntity, UpdateEntity): + """Teslemetry Updates entity.""" + + api: Vehicle + _attr_supported_features = UpdateEntityFeature.PROGRESS + + async def async_install( + self, version: str | None, backup: bool, **kwargs: Any + ) -> None: + """Install an update.""" + self.raise_for_scope(Scope.VEHICLE_CMDS) + + await handle_vehicle_command(self.api.schedule_software_update(offset_sec=0)) + self._attr_in_progress = True + self.async_write_ha_state() + + +class TeslemetryPollingUpdateEntity(TeslemetryVehicleEntity, TeslemetryUpdateEntity): """Teslemetry Updates entity.""" def __init__( @@ -94,18 +119,125 @@ class TeslemetryUpdateEntity(TeslemetryVehicleEntity, UpdateEntity): ): self._attr_in_progress = True if install_perc := self.get("vehicle_state_software_update_install_perc"): - self._attr_update_percentage = cast(int, install_perc) + self._attr_update_percentage = install_perc else: self._attr_in_progress = False self._attr_update_percentage = None - async def async_install( - self, version: str | None, backup: bool, **kwargs: Any + +class TeslemetryStreamingUpdateEntity( + TeslemetryVehicleStreamEntity, TeslemetryUpdateEntity, RestoreEntity +): + """Teslemetry Updates entity.""" + + _download_percentage: int = 0 + _install_percentage: int = 0 + + def __init__( + self, + data: TeslemetryVehicleData, + scopes: list[Scope], ) -> None: - """Install an update.""" - self.raise_for_scope(Scope.ENERGY_CMDS) - await self.wake_up_if_asleep() - await handle_vehicle_command(self.api.schedule_software_update(offset_sec=60)) - self._attr_in_progress = True - self._attr_update_percentage = None + """Initialize the Update.""" + self.scoped = Scope.VEHICLE_CMDS in scopes + super().__init__( + data, + "vehicle_state_software_update_status", + ) + + async def async_added_to_hass(self) -> None: + """Handle entity which will be added.""" + await super().async_added_to_hass() + if (state := await self.async_get_last_state()) is not None: + self._attr_in_progress = state.attributes.get("in_progress", False) + self._install_percentage = state.attributes.get("install_percentage", False) + self._attr_installed_version = state.attributes.get("installed_version") + self._attr_latest_version = state.attributes.get("latest_version") + self._attr_supported_features = UpdateEntityFeature( + state.attributes.get( + "supported_features", self._attr_supported_features + ) + ) + self.async_write_ha_state() + + self.async_on_remove( + self.vehicle.stream_vehicle.listen_SoftwareUpdateDownloadPercentComplete( + self._async_handle_software_update_download_percent_complete + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_SoftwareUpdateInstallationPercentComplete( + 
self._async_handle_software_update_installation_percent_complete + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_SoftwareUpdateScheduledStartTime( + self._async_handle_software_update_scheduled_start_time + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_SoftwareUpdateVersion( + self._async_handle_software_update_version + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_Version(self._async_handle_version) + ) + + def _async_handle_software_update_download_percent_complete( + self, value: float | None + ): + """Handle software update download percent complete.""" + + self._download_percentage = round(value) if value is not None else 0 + if self.scoped and self._download_percentage == 100: + self._attr_supported_features = ( + UpdateEntityFeature.PROGRESS | UpdateEntityFeature.INSTALL + ) + else: + self._attr_supported_features = UpdateEntityFeature.PROGRESS + self._async_update_progress() self.async_write_ha_state() + + def _async_handle_software_update_installation_percent_complete( + self, value: float | None + ): + """Handle software update installation percent complete.""" + + self._install_percentage = round(value) if value is not None else 0 + self._async_update_progress() + self.async_write_ha_state() + + def _async_handle_software_update_scheduled_start_time(self, value: str | None): + """Handle software update scheduled start time.""" + + self._attr_in_progress = value is not None + self.async_write_ha_state() + + def _async_handle_software_update_version(self, value: str | None): + """Handle software update version.""" + + self._attr_latest_version = ( + value if value and value != " " else self._attr_installed_version + ) + self.async_write_ha_state() + + def _async_handle_version(self, value: str | None): + """Handle version.""" + + if value is not None: + self._attr_installed_version = value.split(" ")[0] + self.async_write_ha_state() + + def _async_update_progress(self) -> None: + """Update the progress of the update.""" + + if self._download_percentage > 1 and self._download_percentage < 100: + self._attr_in_progress = True + self._attr_update_percentage = self._download_percentage + elif self._install_percentage > 1: + self._attr_in_progress = True + self._attr_update_percentage = self._install_percentage + else: + self._attr_in_progress = False + self._attr_update_percentage = None diff --git a/homeassistant/components/tessie/__init__.py b/homeassistant/components/tessie/__init__.py index f73ecc7a729..e247931e3ba 100644 --- a/homeassistant/components/tessie/__init__.py +++ b/homeassistant/components/tessie/__init__.py @@ -5,9 +5,9 @@ from http import HTTPStatus import logging from aiohttp import ClientError, ClientResponseError -from tesla_fleet_api import EnergySpecific, Tessie from tesla_fleet_api.const import Scope from tesla_fleet_api.exceptions import TeslaFleetError +from tesla_fleet_api.tessie import Tessie from tessie_api import get_state_of_all_vehicles from homeassistant.config_entries import ConfigEntry @@ -123,7 +123,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TessieConfigEntry) -> bo ) continue - api = EnergySpecific(tessie.energy, site_id) + api = tessie.energySites.create(site_id) energysites.append( TessieEnergyData( api=api, diff --git a/homeassistant/components/tessie/coordinator.py b/homeassistant/components/tessie/coordinator.py index b06fe6123a5..2382595b058 100644 --- a/homeassistant/components/tessie/coordinator.py +++ b/homeassistant/components/tessie/coordinator.py @@ -8,8 
+8,8 @@ import logging from typing import TYPE_CHECKING, Any from aiohttp import ClientResponseError -from tesla_fleet_api import EnergySpecific from tesla_fleet_api.exceptions import InvalidToken, MissingToken, TeslaFleetError +from tesla_fleet_api.tessie import EnergySite from tessie_api import get_state, get_status from homeassistant.core import HomeAssistant @@ -102,7 +102,7 @@ class TessieEnergySiteLiveCoordinator(DataUpdateCoordinator[dict[str, Any]]): config_entry: TessieConfigEntry def __init__( - self, hass: HomeAssistant, config_entry: TessieConfigEntry, api: EnergySpecific + self, hass: HomeAssistant, config_entry: TessieConfigEntry, api: EnergySite ) -> None: """Initialize Tessie Energy Site Live coordinator.""" super().__init__( @@ -138,7 +138,7 @@ class TessieEnergySiteInfoCoordinator(DataUpdateCoordinator[dict[str, Any]]): config_entry: TessieConfigEntry def __init__( - self, hass: HomeAssistant, config_entry: TessieConfigEntry, api: EnergySpecific + self, hass: HomeAssistant, config_entry: TessieConfigEntry, api: EnergySite ) -> None: """Initialize Tessie Energy Info coordinator.""" super().__init__( diff --git a/homeassistant/components/tessie/manifest.json b/homeassistant/components/tessie/manifest.json index d4ac56883e8..3f96bb226ab 100644 --- a/homeassistant/components/tessie/manifest.json +++ b/homeassistant/components/tessie/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/tessie", "iot_class": "cloud_polling", "loggers": ["tessie", "tesla-fleet-api"], - "requirements": ["tessie-api==0.1.1", "tesla-fleet-api==0.9.12"] + "requirements": ["tessie-api==0.1.1", "tesla-fleet-api==1.0.16"] } diff --git a/homeassistant/components/tessie/models.py b/homeassistant/components/tessie/models.py index ca670b9650b..03652782cfe 100644 --- a/homeassistant/components/tessie/models.py +++ b/homeassistant/components/tessie/models.py @@ -4,7 +4,7 @@ from __future__ import annotations from dataclasses import dataclass -from tesla_fleet_api import EnergySpecific +from tesla_fleet_api.tessie import EnergySite from homeassistant.helpers.device_registry import DeviceInfo @@ -27,7 +27,7 @@ class TessieData: class TessieEnergyData: """Data for a Energy Site in the Tessie integration.""" - api: EnergySpecific + api: EnergySite live_coordinator: TessieEnergySiteLiveCoordinator info_coordinator: TessieEnergySiteInfoCoordinator id: int diff --git a/homeassistant/components/tessie/number.py b/homeassistant/components/tessie/number.py index 1e857345278..77d8037fb14 100644 --- a/homeassistant/components/tessie/number.py +++ b/homeassistant/components/tessie/number.py @@ -7,7 +7,7 @@ from dataclasses import dataclass from itertools import chain from typing import Any -from tesla_fleet_api import EnergySpecific +from tesla_fleet_api.tessie import EnergySite from tessie_api import set_charge_limit, set_charging_amps, set_speed_limit from homeassistant.components.number import ( @@ -90,7 +90,7 @@ VEHICLE_DESCRIPTIONS: tuple[TessieNumberEntityDescription, ...] 
= ( class TessieNumberBatteryEntityDescription(NumberEntityDescription): """Describes Tessie Number entity.""" - func: Callable[[EnergySpecific, float], Awaitable[Any]] + func: Callable[[EnergySite, float], Awaitable[Any]] requires: str diff --git a/homeassistant/components/thethingsnetwork/sensor.py b/homeassistant/components/thethingsnetwork/sensor.py index ba512d07f18..5aa851d99ae 100644 --- a/homeassistant/components/thethingsnetwork/sensor.py +++ b/homeassistant/components/thethingsnetwork/sensor.py @@ -40,7 +40,7 @@ async def async_setup_entry( if (device_id, field_id) not in sensors and isinstance(ttn_value, TTNSensorValue) } - if len(new_sensors): + if new_sensors: async_add_entities(new_sensors.values()) sensors.update(new_sensors.keys()) diff --git a/homeassistant/components/tibber/coordinator.py b/homeassistant/components/tibber/coordinator.py index 2de9ebd1ec6..e565fdc7dd8 100644 --- a/homeassistant/components/tibber/coordinator.py +++ b/homeassistant/components/tibber/coordinator.py @@ -9,7 +9,11 @@ from typing import cast import tibber from homeassistant.components.recorder import get_instance -from homeassistant.components.recorder.models import StatisticData, StatisticMetaData +from homeassistant.components.recorder.models import ( + StatisticData, + StatisticMeanType, + StatisticMetaData, +) from homeassistant.components.recorder.statistics import ( async_add_external_statistics, get_last_statistics, @@ -159,7 +163,7 @@ class TibberDataCoordinator(DataUpdateCoordinator[None]): ) metadata = StatisticMetaData( - has_mean=False, + mean_type=StatisticMeanType.NONE, has_sum=True, name=f"{home.name} {sensor_type}", source=TIBBER_DOMAIN, diff --git a/homeassistant/components/totalconnect/diagnostics.py b/homeassistant/components/totalconnect/diagnostics.py index f42ed5e44c3..fc310bf850c 100644 --- a/homeassistant/components/totalconnect/diagnostics.py +++ b/homeassistant/components/totalconnect/diagnostics.py @@ -83,6 +83,7 @@ async def async_get_config_entry_diagnostics( "is_new_partition": partition.is_new_partition, "is_night_stay_enabled": partition.is_night_stay_enabled, "exit_delay_timer": partition.exit_delay_timer, + "arming_state": partition.arming_state, } new_location["partitions"].append(new_partition) diff --git a/homeassistant/components/tplink_omada/manifest.json b/homeassistant/components/tplink_omada/manifest.json index af20b54675b..274f2815330 100644 --- a/homeassistant/components/tplink_omada/manifest.json +++ b/homeassistant/components/tplink_omada/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/tplink_omada", "integration_type": "hub", "iot_class": "local_polling", - "requirements": ["tplink-omada-client==1.4.3"] + "requirements": ["tplink-omada-client==1.4.4"] } diff --git a/homeassistant/components/tractive/device_tracker.py b/homeassistant/components/tractive/device_tracker.py index 73be7216a2f..bd1380ade4c 100644 --- a/homeassistant/components/tractive/device_tracker.py +++ b/homeassistant/components/tractive/device_tracker.py @@ -55,11 +55,9 @@ class TractiveDeviceTracker(TractiveEntity, TrackerEntity): @property def source_type(self) -> SourceType: - """Return the source type, eg gps or router, of the device.""" + """Return the source type of the device.""" if self._source_type == "PHONE": return SourceType.BLUETOOTH - if self._source_type == "KNOWN_WIFI": - return SourceType.ROUTER return SourceType.GPS @property diff --git a/homeassistant/components/tradfri/config_flow.py 
b/homeassistant/components/tradfri/config_flow.py index 9f5b39a9657..f4adb1cc09e 100644 --- a/homeassistant/components/tradfri/config_flow.py +++ b/homeassistant/components/tradfri/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations import asyncio -from typing import Any +from typing import Any, cast from uuid import uuid4 from pytradfri import Gateway, RequestError @@ -54,7 +54,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): errors: dict[str, str] = {} if user_input is not None: - host = user_input.get(CONF_HOST, self._host) + host = cast(str, user_input.get(CONF_HOST, self._host)) try: auth = await authenticate( self.hass, host, user_input[KEY_SECURITY_CODE] diff --git a/homeassistant/components/tradfri/strings.json b/homeassistant/components/tradfri/strings.json index 9ed7e167e71..66c46dd482e 100644 --- a/homeassistant/components/tradfri/strings.json +++ b/homeassistant/components/tradfri/strings.json @@ -6,7 +6,7 @@ "description": "You can find the security code on the back of your gateway.", "data": { "host": "[%key:common::config_flow::data::host%]", - "security_code": "Security Code" + "security_code": "Security code" }, "data_description": { "host": "Hostname or IP address of your Trådfri gateway." @@ -14,7 +14,7 @@ } }, "error": { - "invalid_security_code": "Failed to register with provided key. If this keeps happening, try restarting the gateway.", + "invalid_security_code": "Failed to register with provided code. If this keeps happening, try restarting the gateway.", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "timeout": "Timeout validating the code.", "cannot_authenticate": "Cannot authenticate, is Gateway paired with another server like e.g. Homekit?" diff --git a/homeassistant/components/trafikverket_camera/strings.json b/homeassistant/components/trafikverket_camera/strings.json index b6e2209fc57..8fdc6357156 100644 --- a/homeassistant/components/trafikverket_camera/strings.json +++ b/homeassistant/components/trafikverket_camera/strings.json @@ -18,7 +18,7 @@ "location": "[%key:common::config_flow::data::location%]" }, "data_description": { - "location": "Equal or part of name, description or camera id. Be as specific as possible to avoid getting multiple cameras as result" + "location": "Equal or part of name, description or camera ID. 
Be as specific as possible to avoid getting multiple cameras as result" } }, "multiple_cameras": { @@ -60,7 +60,7 @@ "name": "[%key:common::config_flow::data::location%]" }, "photo_url": { - "name": "Photo url" + "name": "Photo URL" }, "status": { "name": "Status" @@ -87,7 +87,7 @@ "name": "Photo time" }, "photo_url": { - "name": "Photo url" + "name": "Photo URL" }, "status": { "name": "Status" diff --git a/homeassistant/components/trafikverket_ferry/config_flow.py b/homeassistant/components/trafikverket_ferry/config_flow.py index 002dc421273..dfa64ed2953 100644 --- a/homeassistant/components/trafikverket_ferry/config_flow.py +++ b/homeassistant/components/trafikverket_ferry/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Mapping +import logging from typing import Any from pytrafikverket import TrafikverketFerry @@ -17,6 +18,8 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import CONF_FROM, CONF_TIME, CONF_TO, DOMAIN from .util import create_unique_id +_LOGGER = logging.getLogger(__name__) + DATA_SCHEMA = vol.Schema( { vol.Required(CONF_API_KEY): selector.TextSelector( @@ -81,7 +84,8 @@ class TVFerryConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "invalid_auth" except NoFerryFound: errors["base"] = "invalid_route" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "cannot_connect" else: return self.async_update_reload_and_abort( @@ -120,7 +124,8 @@ class TVFerryConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "invalid_auth" except NoFerryFound: errors["base"] = "invalid_route" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "cannot_connect" else: if not errors: diff --git a/homeassistant/components/trafikverket_train/config_flow.py b/homeassistant/components/trafikverket_train/config_flow.py index f6a58e464a1..eb0a4a45791 100644 --- a/homeassistant/components/trafikverket_train/config_flow.py +++ b/homeassistant/components/trafikverket_train/config_flow.py @@ -86,8 +86,8 @@ async def validate_station( except UnknownError as error: _LOGGER.error("Unknown error occurred during validation %s", str(error)) errors["base"] = "cannot_connect" - except Exception as error: # noqa: BLE001 - _LOGGER.error("Unknown exception occurred during validation %s", str(error)) + except Exception: + _LOGGER.exception("Unknown exception occurred during validation") errors["base"] = "cannot_connect" return (stations, errors) diff --git a/homeassistant/components/trafikverket_weatherstation/config_flow.py b/homeassistant/components/trafikverket_weatherstation/config_flow.py index f4316b887b3..ee9fe264692 100644 --- a/homeassistant/components/trafikverket_weatherstation/config_flow.py +++ b/homeassistant/components/trafikverket_weatherstation/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Mapping +import logging from typing import Any from pytrafikverket.exceptions import ( @@ -25,6 +26,8 @@ from homeassistant.helpers.selector import ( from .const import CONF_STATION, DOMAIN +_LOGGER = logging.getLogger(__name__) + class TVWeatherConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Trafikverket Weatherstation integration.""" @@ -56,7 +59,8 @@ class TVWeatherConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "invalid_station" except MultipleWeatherStationsFound: errors["base"] = "more_stations" - except Exception: # 
noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected error") errors["base"] = "cannot_connect" else: return self.async_create_entry( @@ -102,7 +106,8 @@ class TVWeatherConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "invalid_station" except MultipleWeatherStationsFound: errors["base"] = "more_stations" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "cannot_connect" else: return self.async_update_reload_and_abort( @@ -132,7 +137,8 @@ class TVWeatherConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "invalid_station" except MultipleWeatherStationsFound: errors["base"] = "more_stations" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "cannot_connect" else: return self.async_update_reload_and_abort( diff --git a/homeassistant/components/trafikverket_weatherstation/sensor.py b/homeassistant/components/trafikverket_weatherstation/sensor.py index cb923037a24..bbc6764e3ef 100644 --- a/homeassistant/components/trafikverket_weatherstation/sensor.py +++ b/homeassistant/components/trafikverket_weatherstation/sensor.py @@ -89,7 +89,8 @@ SENSOR_TYPES: tuple[TrafikverketSensorEntityDescription, ...] = ( translation_key="wind_direction", value_fn=lambda data: data.winddirection, native_unit_of_measurement=DEGREE, - state_class=SensorStateClass.MEASUREMENT, + state_class=SensorStateClass.MEASUREMENT_ANGLE, + device_class=SensorDeviceClass.WIND_DIRECTION, ), TrafikverketSensorEntityDescription( key="wind_speed", diff --git a/homeassistant/components/triggercmd/config_flow.py b/homeassistant/components/triggercmd/config_flow.py index fc02dd0b2fc..48c4eacfd5a 100644 --- a/homeassistant/components/triggercmd/config_flow.py +++ b/homeassistant/components/triggercmd/config_flow.py @@ -57,7 +57,7 @@ class TriggerCMDConfigFlow(ConfigFlow, domain=DOMAIN): errors[CONF_TOKEN] = "invalid_token" except TRIGGERcmdConnectionError: errors["base"] = "cannot_connect" - except Exception: # pylint: disable=broad-except + except Exception: _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: diff --git a/homeassistant/components/tts/__init__.py b/homeassistant/components/tts/__init__.py index 98ce76cafde..cb207643471 100644 --- a/homeassistant/components/tts/__init__.py +++ b/homeassistant/components/tts/__init__.py @@ -17,7 +17,7 @@ import secrets import subprocess import tempfile from time import monotonic -from typing import Any, Final, TypedDict +from typing import Any, Final from aiohttp import web import mutagen @@ -62,7 +62,7 @@ from .const import ( DOMAIN, TtsAudioType, ) -from .entity import TextToSpeechEntity +from .entity import TextToSpeechEntity, TTSAudioRequest from .helper import get_engine_instance from .legacy import PLATFORM_SCHEMA, PLATFORM_SCHEMA_BASE, Provider, async_setup_legacy from .media_source import generate_media_source_id, media_source_id_to_kwargs @@ -123,13 +123,94 @@ KEY_PATTERN = "{0}_{1}_{2}_{3}" SCHEMA_SERVICE_CLEAR_CACHE = vol.Schema({}) -class TTSCache(TypedDict): - """Cached TTS file.""" +class TTSCache: + """Cached bytes of a TTS result.""" - extension: str - voice: bytes - pending: asyncio.Task | None - last_used: float + _result_data: bytes | None = None + """When fully loaded, contains the result data.""" + + _partial_data: list[bytes] | None = None + """While loading, contains the data already received from the generator.""" + + _loading_error: Exception | None = None + """If an error occurred while 
loading, contains the error.""" + + _consumers: list[asyncio.Queue[bytes | None]] | None = None + """A queue for each current consumer to notify of new data while the generator is loading.""" + + def __init__( + self, + cache_key: str, + extension: str, + data_gen: AsyncGenerator[bytes], + ) -> None: + """Initialize the TTS cache.""" + self.cache_key = cache_key + self.extension = extension + self.last_used = monotonic() + self._data_gen = data_gen + + async def async_load_data(self) -> bytes: + """Load the data from the generator.""" + if self._result_data is not None or self._partial_data is not None: + raise RuntimeError("Data already being loaded") + + self._partial_data = [] + self._consumers = [] + + try: + async for chunk in self._data_gen: + self._partial_data.append(chunk) + for queue in self._consumers: + queue.put_nowait(chunk) + except Exception as err: + self._loading_error = err + raise + finally: + for queue in self._consumers: + queue.put_nowait(None) + self._consumers = None + + self._result_data = b"".join(self._partial_data) + self._partial_data = None + return self._result_data + + async def async_stream_data(self) -> AsyncGenerator[bytes]: + """Stream the data. + + Will return all data already returned from the generator. + Will listen for future data returned from the generator. + Raises error if one occurred. + """ + if self._result_data is not None: + yield self._result_data + return + if self._loading_error: + raise self._loading_error + + if self._partial_data is None: + raise RuntimeError("Data not being loaded") + + queue: asyncio.Queue[bytes | None] | None = None + # Check if generator is still feeding data + if self._consumers is not None: + queue = asyncio.Queue() + self._consumers.append(queue) + + for chunk in list(self._partial_data): + yield chunk + + if self._loading_error: + raise self._loading_error + + if queue is not None: + while (chunk2 := await queue.get()) is not None: + yield chunk2 + + if self._loading_error: + raise self._loading_error + + self.last_used = monotonic() @callback @@ -182,16 +263,23 @@ def async_create_stream( ) +@callback +def async_get_stream(hass: HomeAssistant, token: str) -> ResultStream | None: + """Return a result stream given a token.""" + return hass.data[DATA_TTS_MANAGER].token_to_stream.get(token) + + async def async_get_media_source_audio( hass: HomeAssistant, media_source_id: str, ) -> tuple[str, bytes]: """Get TTS audio as extension, data.""" manager = hass.data[DATA_TTS_MANAGER] - cache_key = manager.async_cache_message_in_memory( + cache = manager.async_cache_message_in_memory( **media_source_id_to_kwargs(media_source_id) ) - return await manager.async_get_tts_audio(cache_key) + data = b"".join([chunk async for chunk in cache.async_stream_data()]) + return cache.extension, data @callback @@ -210,18 +298,19 @@ def async_get_text_to_speech_languages(hass: HomeAssistant) -> set[str]: return languages -async def async_convert_audio( +async def _async_convert_audio( hass: HomeAssistant, from_extension: str, - audio_bytes: bytes, + audio_bytes_gen: AsyncGenerator[bytes], to_extension: str, to_sample_rate: int | None = None, to_sample_channels: int | None = None, to_sample_bytes: int | None = None, -) -> bytes: +) -> AsyncGenerator[bytes]: """Convert audio to a preferred format using ffmpeg.""" ffmpeg_manager = ffmpeg.get_ffmpeg_manager(hass) - return await hass.async_add_executor_job( + audio_bytes = b"".join([chunk async for chunk in audio_bytes_gen]) + data = await hass.async_add_executor_job( lambda: _convert_audio( 
ffmpeg_manager.binary, from_extension, @@ -232,6 +321,7 @@ async def async_convert_audio( to_sample_bytes=to_sample_bytes, ) ) + yield data def _convert_audio( @@ -395,32 +485,33 @@ class ResultStream: return f"/api/tts_proxy/{self.token}" @cached_property - def _result_cache_key(self) -> asyncio.Future[str]: - """Get the future that returns the cache key.""" + def _result_cache(self) -> asyncio.Future[TTSCache]: + """Get the future that returns the cache.""" return asyncio.Future() @callback - def async_set_message_cache_key(self, cache_key: str) -> None: - """Set cache key for message to be streamed.""" - self._result_cache_key.set_result(cache_key) + def async_set_message_cache(self, cache: TTSCache) -> None: + """Set cache containing message audio to be streamed.""" + self._result_cache.set_result(cache) @callback def async_set_message(self, message: str) -> None: """Set message to be generated.""" - cache_key = self._manager.async_cache_message_in_memory( - engine=self.engine, - message=message, - use_file_cache=self.use_file_cache, - language=self.language, - options=self.options, + self._result_cache.set_result( + self._manager.async_cache_message_in_memory( + engine=self.engine, + message=message, + use_file_cache=self.use_file_cache, + language=self.language, + options=self.options, + ) ) - self._result_cache_key.set_result(cache_key) async def async_stream_result(self) -> AsyncGenerator[bytes]: """Get the stream of this result.""" - cache_key = await self._result_cache_key - _extension, data = await self._manager.async_get_tts_audio(cache_key) - yield data + cache = await self._result_cache + async for chunk in cache.async_stream_data(): + yield chunk def _hash_options(options: dict) -> str: @@ -477,7 +568,7 @@ class MemcacheCleanup: now = monotonic() for cache_key, info in list(memcache.items()): - if info["last_used"] + maxage < now: + if info.last_used + maxage < now: _LOGGER.debug("Cleaning up %s", cache_key) del memcache[cache_key] @@ -632,15 +723,18 @@ class SpeechManager: if message is None: return result_stream - cache_key = self._async_ensure_cached_in_memory( - engine=engine, - engine_instance=engine_instance, - message=message, - use_file_cache=use_file_cache, - language=language, - options=options, + # We added this method as an alternative to stream.async_set_message + # to avoid the options being processed twice + result_stream.async_set_message_cache( + self._async_ensure_cached_in_memory( + engine=engine, + engine_instance=engine_instance, + message=message, + use_file_cache=use_file_cache, + language=language, + options=options, + ) ) - result_stream.async_set_message_cache_key(cache_key) return result_stream @@ -652,7 +746,7 @@ class SpeechManager: use_file_cache: bool | None = None, language: str | None = None, options: dict | None = None, - ) -> str: + ) -> TTSCache: """Make sure a message is cached in memory and returns cache key.""" if (engine_instance := get_engine_instance(self.hass, engine)) is None: raise HomeAssistantError(f"Provider {engine} not found") @@ -679,7 +773,7 @@ class SpeechManager: use_file_cache: bool, language: str, options: dict, - ) -> str: + ) -> TTSCache: """Ensure a message is cached. Requires options, language to be processed. 
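
Note on the hunk above: the patch replaces the TypedDict-based in-memory entry with a TTSCache object whose async_load_data acts as the single producer (draining the engine's byte generator, optionally persisting to disk afterwards) while async_stream_data lets any number of consumers attach at any point, replay the chunks received so far, and then wait on a per-consumer queue for the rest. The following standalone sketch illustrates that same fan-out pattern under assumed names (ChunkCache, load, stream, _demo are all hypothetical and are not the Home Assistant API); it is an illustration of the technique, not the integration's code.

from __future__ import annotations

import asyncio
from collections.abc import AsyncGenerator


class ChunkCache:
    """Cache an async byte stream and fan it out to late-attaching consumers."""

    def __init__(self, data_gen: AsyncGenerator[bytes]) -> None:
        self._data_gen = data_gen
        self._result: bytes | None = None          # set once fully loaded
        self._partial: list[bytes] = []            # chunks received so far
        self._consumers: list[asyncio.Queue[bytes | None]] = []
        self._error: Exception | None = None
        self._done = False

    async def load(self) -> bytes:
        """Drain the generator once, notifying attached consumers of each chunk."""
        try:
            async for chunk in self._data_gen:
                self._partial.append(chunk)
                for queue in self._consumers:
                    queue.put_nowait(chunk)
        except Exception as err:
            self._error = err
            raise
        finally:
            self._done = True
            for queue in self._consumers:
                queue.put_nowait(None)  # sentinel: no more chunks
        self._result = b"".join(self._partial)
        return self._result

    async def stream(self) -> AsyncGenerator[bytes]:
        """Yield every chunk, whether or not the load has finished."""
        if self._result is not None:
            yield self._result
            return
        queue: asyncio.Queue[bytes | None] | None = None
        if not self._done:
            # Attach before replaying so chunks arriving later are not missed.
            queue = asyncio.Queue()
            self._consumers.append(queue)
        for chunk in list(self._partial):  # replay what is already loaded
            yield chunk
        if queue is not None:
            while (chunk := await queue.get()) is not None:
                yield chunk
        if self._error:
            raise self._error


async def _demo() -> None:
    async def tts_chunks() -> AsyncGenerator[bytes]:
        for part in (b"he", b"llo", b" world"):
            await asyncio.sleep(0)
            yield part

    cache = ChunkCache(tts_chunks())
    load_task = asyncio.create_task(cache.load())
    received = b"".join([chunk async for chunk in cache.stream()])
    await load_task
    assert received == b"hello world"


if __name__ == "__main__":
    asyncio.run(_demo())

As in the patch, the consumer queue is registered before the already-received chunks are replayed, which avoids both dropped and duplicated chunks as long as producer and consumers run on the same event loop.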
@@ -691,62 +785,101 @@ class SpeechManager: ).lower() # Is speech already in memory - if cache_key in self.mem_cache: - return cache_key + if cache := self.mem_cache.get(cache_key): + _LOGGER.debug("Found audio in cache for %s", message[0:32]) + return cache - if use_file_cache and cache_key in self.file_cache: - coro = self._async_load_file_to_mem(cache_key) + store_to_disk = use_file_cache + + if use_file_cache and (filename := self.file_cache.get(cache_key)): + _LOGGER.debug("Loading audio from disk for %s", message[0:32]) + extension = os.path.splitext(filename)[1][1:] + data_gen = self._async_load_file(cache_key) + store_to_disk = False else: - coro = self._async_generate_tts_audio( - engine_instance, cache_key, message, use_file_cache, language, options + _LOGGER.debug("Generating audio for %s", message[0:32]) + extension = options.get(ATTR_PREFERRED_FORMAT, _DEFAULT_FORMAT) + data_gen = self._async_generate_tts_audio( + engine_instance, message, language, options ) - task = self.hass.async_create_task(coro, eager_start=False) + cache = TTSCache( + cache_key=cache_key, + extension=extension, + data_gen=data_gen, + ) - def handle_error(future: asyncio.Future) -> None: - """Handle error.""" - if not (err := future.exception()): - return + self.mem_cache[cache_key] = cache + self.hass.async_create_background_task( + self._load_data_into_cache( + cache, engine_instance, message, store_to_disk, language, options + ), + f"tts_load_data_into_cache_{engine_instance.name}", + ) + self.memcache_cleanup.schedule() + return cache + + async def _load_data_into_cache( + self, + cache: TTSCache, + engine_instance: TextToSpeechEntity | Provider, + message: str, + store_to_disk: bool, + language: str, + options: dict, + ) -> None: + """Load and process a finished loading TTS Cache.""" + try: + data = await cache.async_load_data() + except Exception as err: # pylint: disable=broad-except # noqa: BLE001 # Truncate message so we don't flood the logs. Cutting off at 32 chars # but since we add 3 dots to truncated message, we cut off at 35. trunc_msg = message if len(message) < 35 else f"{message[0:32]}…" - _LOGGER.error("Error generating audio for %s: %s", trunc_msg, err) - self.mem_cache.pop(cache_key, None) + _LOGGER.error("Error getting audio for %s: %s", trunc_msg, err) + self.mem_cache.pop(cache.cache_key, None) + return - task.add_done_callback(handle_error) + if not store_to_disk: + return - self.mem_cache[cache_key] = { - "extension": "", - "voice": b"", - "pending": task, - "last_used": monotonic(), - } - return cache_key + filename = f"{cache.cache_key}.{cache.extension}".lower() - async def async_get_tts_audio(self, cache_key: str) -> tuple[str, bytes]: - """Fetch TTS audio.""" - cached = self.mem_cache.get(cache_key) - if cached is None: - raise HomeAssistantError("Audio not cached") - if pending := cached.get("pending"): - await pending - cached = self.mem_cache[cache_key] - cached["last_used"] = monotonic() - return cached["extension"], cached["voice"] + # Validate filename + if not _RE_VOICE_FILE.match(filename) and not _RE_LEGACY_VOICE_FILE.match( + filename + ): + raise HomeAssistantError( + f"TTS filename '{filename}' from {engine_instance.name} is invalid!" 
+ ) + + if cache.extension == "mp3": + name = ( + engine_instance.name if isinstance(engine_instance.name, str) else "-" + ) + data = self.write_tags(filename, data, name, message, language, options) + + voice_file = os.path.join(self.cache_dir, filename) + + def save_speech() -> None: + """Store speech to filesystem.""" + with open(voice_file, "wb") as speech: + speech.write(data) + + try: + await self.hass.async_add_executor_job(save_speech) + except OSError as err: + _LOGGER.error("Can't write %s: %s", filename, err) + else: + self.file_cache[cache.cache_key] = filename async def _async_generate_tts_audio( self, engine_instance: TextToSpeechEntity | Provider, - cache_key: str, message: str, - cache_to_disk: bool, language: str, options: dict[str, Any], - ) -> None: - """Start loading of the TTS audio. - - This method is a coroutine. - """ + ) -> AsyncGenerator[bytes]: + """Generate TTS audio from an engine.""" options = dict(options or {}) supported_options = engine_instance.supported_options or [] @@ -794,15 +927,27 @@ class SpeechManager: extension, data = await engine_instance.async_get_tts_audio( message, language, options ) - else: - extension, data = await engine_instance.internal_async_get_tts_audio( - message, language, options - ) - if data is None or extension is None: - raise HomeAssistantError( - f"No TTS from {engine_instance.name} for '{message}'" + if data is None or extension is None: + raise HomeAssistantError( + f"No TTS from {engine_instance.name} for '{message}'" + ) + + async def make_data_generator(data: bytes) -> AsyncGenerator[bytes]: + yield data + + data_gen = make_data_generator(data) + + else: + + async def message_gen() -> AsyncGenerator[str]: + yield message + + tts_result = await engine_instance.internal_async_stream_tts_audio( + TTSAudioRequest(language, options, message_gen()) ) + extension = tts_result.extension + data_gen = tts_result.data_gen # Only convert if we have a preferred format different than the # expected format from the TTS system, or if a specific sample @@ -815,62 +960,21 @@ class SpeechManager: ) if needs_conversion: - data = await async_convert_audio( + data_gen = _async_convert_audio( self.hass, extension, - data, + data_gen, to_extension=final_extension, to_sample_rate=sample_rate, to_sample_channels=sample_channels, to_sample_bytes=sample_bytes, ) - # Create file infos - filename = f"{cache_key}.{final_extension}".lower() + async for chunk in data_gen: + yield chunk - # Validate filename - if not _RE_VOICE_FILE.match(filename) and not _RE_LEGACY_VOICE_FILE.match( - filename - ): - raise HomeAssistantError( - f"TTS filename '{filename}' from {engine_instance.name} is invalid!" 
- ) - - # Save to memory - if final_extension == "mp3": - data = self.write_tags( - filename, data, engine_instance.name, message, language, options - ) - - self._async_store_to_memcache(cache_key, final_extension, data) - - if not cache_to_disk: - return - - voice_file = os.path.join(self.cache_dir, filename) - - def save_speech() -> None: - """Store speech to filesystem.""" - with open(voice_file, "wb") as speech: - speech.write(data) - - # Don't await, we're going to do this in the background - task = self.hass.async_add_executor_job(save_speech) - - def write_done(future: asyncio.Future) -> None: - """Write is done task.""" - if err := future.exception(): - _LOGGER.error("Can't write %s: %s", filename, err) - else: - self.file_cache[cache_key] = filename - - task.add_done_callback(write_done) - - async def _async_load_file_to_mem(self, cache_key: str) -> None: - """Load voice from file cache into memory. - - This method is a coroutine. - """ + async def _async_load_file(self, cache_key: str) -> AsyncGenerator[bytes]: + """Load TTS audio from disk.""" if not (filename := self.file_cache.get(cache_key)): raise HomeAssistantError(f"Key {cache_key} not in file cache!") @@ -887,22 +991,7 @@ class SpeechManager: del self.file_cache[cache_key] raise HomeAssistantError(f"Can't read {voice_file}") from err - extension = os.path.splitext(filename)[1][1:] - - self._async_store_to_memcache(cache_key, extension, data) - - @callback - def _async_store_to_memcache( - self, cache_key: str, extension: str, data: bytes - ) -> None: - """Store data to memcache and set timer to remove it.""" - self.mem_cache[cache_key] = { - "extension": extension, - "voice": data, - "pending": None, - "last_used": monotonic(), - } - self.memcache_cleanup.schedule() + yield data @staticmethod def write_tags( diff --git a/homeassistant/components/tts/entity.py b/homeassistant/components/tts/entity.py index ef65886452d..199d673398e 100644 --- a/homeassistant/components/tts/entity.py +++ b/homeassistant/components/tts/entity.py @@ -1,6 +1,7 @@ """Entity for Text-to-Speech.""" -from collections.abc import Mapping +from collections.abc import AsyncGenerator, Mapping +from dataclasses import dataclass from functools import partial from typing import Any, final @@ -16,6 +17,7 @@ from homeassistant.components.media_player import ( ) from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import callback +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.util import dt as dt_util @@ -31,6 +33,23 @@ CACHED_PROPERTIES_WITH_ATTR_ = { } +@dataclass +class TTSAudioRequest: + """Request to get TTS audio.""" + + language: str + options: dict[str, Any] + message_gen: AsyncGenerator[str] + + +@dataclass +class TTSAudioResponse: + """Response containing TTS audio stream.""" + + extension: str + data_gen: AsyncGenerator[bytes] + + class TextToSpeechEntity(RestoreEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): """Represent a single TTS engine.""" @@ -128,19 +147,37 @@ class TextToSpeechEntity(RestoreEntity, cached_properties=CACHED_PROPERTIES_WITH ) @final - async def internal_async_get_tts_audio( - self, message: str, language: str, options: dict[str, Any] - ) -> TtsAudioType: + async def internal_async_stream_tts_audio( + self, request: TTSAudioRequest + ) -> TTSAudioResponse: """Process an audio stream to TTS service. Only streaming content is allowed! 
""" self.__last_tts_loaded = dt_util.utcnow().isoformat() self.async_write_ha_state() - return await self.async_get_tts_audio( - message=message, language=language, options=options + return await self.async_stream_tts_audio(request) + + async def async_stream_tts_audio( + self, request: TTSAudioRequest + ) -> TTSAudioResponse: + """Generate speech from an incoming message. + + The default implementation is backwards compatible with async_get_tts_audio. + """ + message = "".join([chunk async for chunk in request.message_gen]) + extension, data = await self.async_get_tts_audio( + message, request.language, request.options ) + if extension is None or data is None: + raise HomeAssistantError(f"No TTS from {self.entity_id} for '{message}'") + + async def data_gen() -> AsyncGenerator[bytes]: + yield data + + return TTSAudioResponse(extension, data_gen()) + def get_tts_audio( self, message: str, language: str, options: dict[str, Any] ) -> TtsAudioType: diff --git a/homeassistant/components/twilio/strings.json b/homeassistant/components/twilio/strings.json index 871711ff087..f4b7dee707f 100644 --- a/homeassistant/components/twilio/strings.json +++ b/homeassistant/components/twilio/strings.json @@ -12,7 +12,7 @@ "webhook_not_internet_accessible": "[%key:common::config_flow::abort::webhook_not_internet_accessible%]" }, "create_entry": { - "default": "To send events to Home Assistant, you will need to setup [Webhooks with Twilio]({twilio_url}).\n\nFill in the following info:\n\n- URL: `{webhook_url}`\n- Method: POST\n- Content Type: application/x-www-form-urlencoded\n\nSee [the documentation]({docs_url}) on how to configure automations to handle incoming data." + "default": "To send events to Home Assistant, you will need to set up [webhooks with Twilio]({twilio_url}).\n\nFill in the following info:\n\n- URL: `{webhook_url}`\n- Method: POST\n- Content Type: application/x-www-form-urlencoded\n\nSee [the documentation]({docs_url}) on how to configure automations to handle incoming data." 
} } } diff --git a/homeassistant/components/ukraine_alarm/coordinator.py b/homeassistant/components/ukraine_alarm/coordinator.py index 267358e4aa6..b4e1decb1a1 100644 --- a/homeassistant/components/ukraine_alarm/coordinator.py +++ b/homeassistant/components/ukraine_alarm/coordinator.py @@ -52,7 +52,7 @@ class UkraineAlarmDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): except aiohttp.ClientError as error: raise UpdateFailed(f"Error fetching alerts from API: {error}") from error - current = {alert_type: False for alert_type in ALERT_TYPES} + current = dict.fromkeys(ALERT_TYPES, False) for alert in res[0]["activeAlerts"]: current[alert["type"]] = True diff --git a/homeassistant/components/usb/__init__.py b/homeassistant/components/usb/__init__.py index d68742522a0..994f4f71c35 100644 --- a/homeassistant/components/usb/__init__.py +++ b/homeassistant/components/usb/__init__.py @@ -265,8 +265,15 @@ class USBDiscovery: async def async_setup(self) -> None: """Set up USB Discovery.""" - if self._async_supports_monitoring(): - await self._async_start_monitor() + try: + await self._async_start_aiousbwatcher() + except InotifyNotAvailableError as ex: + _LOGGER.info( + "Falling back to periodic filesystem polling for development, " + "aiousbwatcher is not available on this system: %s", + ex, + ) + self._async_start_monitor_polling() self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, self.async_start) self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, self.async_stop) @@ -281,22 +288,6 @@ class USBDiscovery: if self._request_debouncer: self._request_debouncer.async_shutdown() - @hass_callback - def _async_supports_monitoring(self) -> bool: - return sys.platform == "linux" - - async def _async_start_monitor(self) -> None: - """Start monitoring hardware.""" - try: - await self._async_start_aiousbwatcher() - except InotifyNotAvailableError as ex: - _LOGGER.info( - "Falling back to periodic filesystem polling for development, aiousbwatcher " - "is not available on this system: %s", - ex, - ) - self._async_start_monitor_polling() - @hass_callback def _async_start_monitor_polling(self) -> None: """Start monitoring hardware with polling (for development only!).""" diff --git a/homeassistant/components/vallox/config_flow.py b/homeassistant/components/vallox/config_flow.py index 30d1d153d9e..c7e6af8891a 100644 --- a/homeassistant/components/vallox/config_flow.py +++ b/homeassistant/components/vallox/config_flow.py @@ -108,7 +108,7 @@ class ValloxConfigFlow(ConfigFlow, domain=DOMAIN): errors[CONF_HOST] = "invalid_host" except ValloxApiException: errors[CONF_HOST] = "cannot_connect" - except Exception: # pylint: disable=broad-except + except Exception: _LOGGER.exception("Unexpected exception") errors[CONF_HOST] = "unknown" else: diff --git a/homeassistant/components/velbus/config_flow.py b/homeassistant/components/velbus/config_flow.py index fc5da92588a..7c93d8784ad 100644 --- a/homeassistant/components/velbus/config_flow.py +++ b/homeassistant/components/velbus/config_flow.py @@ -63,7 +63,7 @@ class VelbusConfigFlow(ConfigFlow, domain=DOMAIN): self._device = "tls://" else: self._device = "" - if user_input[CONF_PASSWORD] != "": + if CONF_PASSWORD in user_input and user_input[CONF_PASSWORD] != "": self._device += f"{user_input[CONF_PASSWORD]}@" self._device += f"{user_input[CONF_HOST]}:{user_input[CONF_PORT]}" self._async_abort_entries_match({CONF_PORT: self._device}) diff --git a/homeassistant/components/velbus/manifest.json b/homeassistant/components/velbus/manifest.json index 
29504277651..1cb540b22ec 100644 --- a/homeassistant/components/velbus/manifest.json +++ b/homeassistant/components/velbus/manifest.json @@ -14,7 +14,7 @@ "velbus-protocol" ], "quality_scale": "bronze", - "requirements": ["velbus-aio==2025.1.1"], + "requirements": ["velbus-aio==2025.3.1"], "usb": [ { "vid": "10CF", diff --git a/homeassistant/components/velbus/strings.json b/homeassistant/components/velbus/strings.json index a50395af115..35f94e54470 100644 --- a/homeassistant/components/velbus/strings.json +++ b/homeassistant/components/velbus/strings.json @@ -2,10 +2,11 @@ "config": { "step": { "user": { - "title": "Define the Velbus connection type", - "data": { - "name": "The name for this Velbus connection", - "port": "Connection string" + "title": "Define the Velbus connection", + "description": "How do you want to configure the Velbus hub?", + "menu_options": { + "network": "Via network connection", + "usbselect": "Via USB device" } }, "network": { diff --git a/homeassistant/components/vesync/const.py b/homeassistant/components/vesync/const.py index 1273ab914f8..4e39fe40f2d 100644 --- a/homeassistant/components/vesync/const.py +++ b/homeassistant/components/vesync/const.py @@ -30,9 +30,13 @@ VS_HUMIDIFIER_MODE_HUMIDITY = "humidity" VS_HUMIDIFIER_MODE_MANUAL = "manual" VS_HUMIDIFIER_MODE_SLEEP = "sleep" -NIGHT_LIGHT_LEVEL_BRIGHT = "bright" -NIGHT_LIGHT_LEVEL_DIM = "dim" -NIGHT_LIGHT_LEVEL_OFF = "off" +FAN_NIGHT_LIGHT_LEVEL_DIM = "dim" +FAN_NIGHT_LIGHT_LEVEL_OFF = "off" +FAN_NIGHT_LIGHT_LEVEL_ON = "on" + +HUMIDIFIER_NIGHT_LIGHT_LEVEL_BRIGHT = "bright" +HUMIDIFIER_NIGHT_LIGHT_LEVEL_DIM = "dim" +HUMIDIFIER_NIGHT_LIGHT_LEVEL_OFF = "off" VeSyncHumidifierDevice = VeSyncHumid200300S | VeSyncSuperior6000S """Humidifier device types""" diff --git a/homeassistant/components/vesync/select.py b/homeassistant/components/vesync/select.py index c266985fc2b..a9d2e1b533a 100644 --- a/homeassistant/components/vesync/select.py +++ b/homeassistant/components/vesync/select.py @@ -15,9 +15,12 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from .common import rgetattr from .const import ( DOMAIN, - NIGHT_LIGHT_LEVEL_BRIGHT, - NIGHT_LIGHT_LEVEL_DIM, - NIGHT_LIGHT_LEVEL_OFF, + FAN_NIGHT_LIGHT_LEVEL_DIM, + FAN_NIGHT_LIGHT_LEVEL_OFF, + FAN_NIGHT_LIGHT_LEVEL_ON, + HUMIDIFIER_NIGHT_LIGHT_LEVEL_BRIGHT, + HUMIDIFIER_NIGHT_LIGHT_LEVEL_DIM, + HUMIDIFIER_NIGHT_LIGHT_LEVEL_OFF, VS_COORDINATOR, VS_DEVICES, VS_DISCOVERY, @@ -27,14 +30,14 @@ from .entity import VeSyncBaseEntity _LOGGER = logging.getLogger(__name__) -VS_TO_HA_NIGHT_LIGHT_LEVEL_MAP = { - 100: NIGHT_LIGHT_LEVEL_BRIGHT, - 50: NIGHT_LIGHT_LEVEL_DIM, - 0: NIGHT_LIGHT_LEVEL_OFF, +VS_TO_HA_HUMIDIFIER_NIGHT_LIGHT_LEVEL_MAP = { + 100: HUMIDIFIER_NIGHT_LIGHT_LEVEL_BRIGHT, + 50: HUMIDIFIER_NIGHT_LIGHT_LEVEL_DIM, + 0: HUMIDIFIER_NIGHT_LIGHT_LEVEL_OFF, } -HA_TO_VS_NIGHT_LIGHT_LEVEL_MAP = { - v: k for k, v in VS_TO_HA_NIGHT_LIGHT_LEVEL_MAP.items() +HA_TO_VS_HUMIDIFIER_NIGHT_LIGHT_LEVEL_MAP = { + v: k for k, v in VS_TO_HA_HUMIDIFIER_NIGHT_LIGHT_LEVEL_MAP.items() } @@ -48,20 +51,39 @@ class VeSyncSelectEntityDescription(SelectEntityDescription): SELECT_DESCRIPTIONS: list[VeSyncSelectEntityDescription] = [ + # night_light for humidifier VeSyncSelectEntityDescription( key="night_light_level", translation_key="night_light_level", - options=list(VS_TO_HA_NIGHT_LIGHT_LEVEL_MAP.values()), + options=list(VS_TO_HA_HUMIDIFIER_NIGHT_LIGHT_LEVEL_MAP.values()), icon="mdi:brightness-6", - exists_fn=lambda device: rgetattr(device, "night_light"), + 
exists_fn=lambda device: rgetattr(device, "set_night_light_brightness"), # The select_option service framework ensures that only options specified are # accepted. ServiceValidationError gets raised for invalid value. select_option_fn=lambda device, value: device.set_night_light_brightness( - HA_TO_VS_NIGHT_LIGHT_LEVEL_MAP.get(value, 0) + HA_TO_VS_HUMIDIFIER_NIGHT_LIGHT_LEVEL_MAP.get(value, 0) ), # Reporting "off" as the choice for unhandled level. - current_option_fn=lambda device: VS_TO_HA_NIGHT_LIGHT_LEVEL_MAP.get( - device.details.get("night_light_brightness"), NIGHT_LIGHT_LEVEL_OFF + current_option_fn=lambda device: VS_TO_HA_HUMIDIFIER_NIGHT_LIGHT_LEVEL_MAP.get( + device.details.get("night_light_brightness"), + HUMIDIFIER_NIGHT_LIGHT_LEVEL_OFF, + ), + ), + # night_light for fan devices based on pyvesync.VeSyncAirBypass + VeSyncSelectEntityDescription( + key="night_light_level", + translation_key="night_light_level", + options=[ + FAN_NIGHT_LIGHT_LEVEL_OFF, + FAN_NIGHT_LIGHT_LEVEL_DIM, + FAN_NIGHT_LIGHT_LEVEL_ON, + ], + icon="mdi:brightness-6", + exists_fn=lambda device: rgetattr(device, "set_night_light"), + select_option_fn=lambda device, value: device.set_night_light(value), + current_option_fn=lambda device: VS_TO_HA_HUMIDIFIER_NIGHT_LIGHT_LEVEL_MAP.get( + device.details.get("night_light"), + FAN_NIGHT_LIGHT_LEVEL_OFF, ), ), ] diff --git a/homeassistant/components/vesync/strings.json b/homeassistant/components/vesync/strings.json index eabb2969580..9b63bf3e614 100644 --- a/homeassistant/components/vesync/strings.json +++ b/homeassistant/components/vesync/strings.json @@ -71,7 +71,8 @@ "state": { "bright": "Bright", "dim": "Dim", - "off": "[%key:common::state::off%]" + "off": "[%key:common::state::off%]", + "on": "[%key:common::state::on%]" } } }, diff --git a/homeassistant/components/vicare/strings.json b/homeassistant/components/vicare/strings.json index 733cda363e5..04049f026bd 100644 --- a/homeassistant/components/vicare/strings.json +++ b/homeassistant/components/vicare/strings.json @@ -515,11 +515,11 @@ "services": { "set_vicare_mode": { "name": "Set ViCare mode", - "description": "Set a ViCare mode.", + "description": "Sets the mode of the climate device as defined by Viessmann.", "fields": { "vicare_mode": { "name": "ViCare mode", - "description": "ViCare mode." + "description": "For supported values, see the `vicare_modes` attribute of the climate entity." 
} } } diff --git a/homeassistant/components/vilfo/config_flow.py b/homeassistant/components/vilfo/config_flow.py index cdba7f1b8c2..5612591c595 100644 --- a/homeassistant/components/vilfo/config_flow.py +++ b/homeassistant/components/vilfo/config_flow.py @@ -114,8 +114,8 @@ class DomainConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" except InvalidAuth: errors["base"] = "invalid_auth" - except Exception as err: # noqa: BLE001 - _LOGGER.error("Unexpected exception: %s", err) + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: await self.async_set_unique_id(info[CONF_ID]) diff --git a/homeassistant/components/vodafone_station/__init__.py b/homeassistant/components/vodafone_station/__init__.py index 871afe09a2e..9f118fe4fbd 100644 --- a/homeassistant/components/vodafone_station/__init__.py +++ b/homeassistant/components/vodafone_station/__init__.py @@ -1,16 +1,15 @@ """Vodafone Station integration.""" -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant from .const import DOMAIN -from .coordinator import VodafoneStationRouter +from .coordinator import VodafoneConfigEntry, VodafoneStationRouter PLATFORMS = [Platform.BUTTON, Platform.DEVICE_TRACKER, Platform.SENSOR] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: VodafoneConfigEntry) -> bool: """Set up Vodafone Station platform.""" coordinator = VodafoneStationRouter( hass, @@ -22,7 +21,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator entry.async_on_unload(entry.add_update_listener(update_listener)) @@ -31,10 +30,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: VodafoneConfigEntry) -> bool: """Unload a config entry.""" if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - coordinator: VodafoneStationRouter = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data await coordinator.api.logout() await coordinator.api.close() hass.data[DOMAIN].pop(entry.entry_id) @@ -42,7 +41,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return unload_ok -async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def update_listener(hass: HomeAssistant, entry: VodafoneConfigEntry) -> None: """Update when config_entry options update.""" if entry.options: await hass.config_entries.async_reload(entry.entry_id) diff --git a/homeassistant/components/vodafone_station/button.py b/homeassistant/components/vodafone_station/button.py index 9812cef48d6..8dda4d49c7b 100644 --- a/homeassistant/components/vodafone_station/button.py +++ b/homeassistant/components/vodafone_station/button.py @@ -4,21 +4,32 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass +from json.decoder import JSONDecodeError from typing import Any, Final +from aiovodafone.exceptions import ( + AlreadyLogged, + CannotAuthenticate, + CannotConnect, + GenericLoginError, +) + from 
homeassistant.components.button import ( ButtonDeviceClass, ButtonEntity, ButtonEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import _LOGGER, DOMAIN -from .coordinator import VodafoneStationRouter +from .coordinator import VodafoneConfigEntry, VodafoneStationRouter + +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 @dataclass(frozen=True, kw_only=True) @@ -68,13 +79,13 @@ BUTTON_TYPES: Final = ( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: VodafoneConfigEntry, async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up entry.""" _LOGGER.debug("Setting up Vodafone Station buttons") - coordinator: VodafoneStationRouter = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data sensors_data = coordinator.data.sensors @@ -106,4 +117,25 @@ class VodafoneStationSensorEntity( async def async_press(self) -> None: """Triggers the Shelly button press service.""" - await self.entity_description.press_action(self.coordinator) + + try: + await self.entity_description.press_action(self.coordinator) + except CannotAuthenticate as err: + self.coordinator.config_entry.async_start_reauth(self.hass) + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="cannot_authenticate", + translation_placeholders={"error": repr(err)}, + ) from err + except ( + CannotConnect, + AlreadyLogged, + GenericLoginError, + JSONDecodeError, + ) as err: + self.coordinator.last_update_success = False + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="cannot_execute_action", + translation_placeholders={"error": repr(err)}, + ) from err diff --git a/homeassistant/components/vodafone_station/config_flow.py b/homeassistant/components/vodafone_station/config_flow.py index 7a80244f8d6..6641f5f5711 100644 --- a/homeassistant/components/vodafone_station/config_flow.py +++ b/homeassistant/components/vodafone_station/config_flow.py @@ -12,16 +12,12 @@ from homeassistant.components.device_tracker import ( CONF_CONSIDER_HOME, DEFAULT_CONSIDER_HOME, ) -from homeassistant.config_entries import ( - ConfigEntry, - ConfigFlow, - ConfigFlowResult, - OptionsFlow, -) +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant, callback from .const import _LOGGER, DEFAULT_HOST, DEFAULT_USERNAME, DOMAIN +from .coordinator import VodafoneConfigEntry def user_form_schema(user_input: dict[str, Any] | None) -> vol.Schema: @@ -63,7 +59,7 @@ class VodafoneStationConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback def async_get_options_flow( - config_entry: ConfigEntry, + config_entry: VodafoneConfigEntry, ) -> VodafoneStationOptionsFlowHandler: """Get the options flow for this handler.""" return VodafoneStationOptionsFlowHandler() @@ -143,6 +139,47 @@ class VodafoneStationConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reconfiguration of the device.""" + reconfigure_entry = self._get_reconfigure_entry() + if not 
user_input: +            return self.async_show_form( +                step_id="reconfigure", data_schema=user_form_schema(user_input) +            ) + +        updated_host = user_input[CONF_HOST] + +        if reconfigure_entry.data[CONF_HOST] != updated_host: +            self._async_abort_entries_match({CONF_HOST: updated_host}) + +        errors: dict[str, str] = {} + +        try: +            await validate_input(self.hass, user_input) +        except aiovodafone_exceptions.AlreadyLogged: +            errors["base"] = "already_logged" +        except aiovodafone_exceptions.CannotConnect: +            errors["base"] = "cannot_connect" +        except aiovodafone_exceptions.CannotAuthenticate: +            errors["base"] = "invalid_auth" +        except Exception:  # noqa: BLE001 +            _LOGGER.exception("Unexpected exception") +            errors["base"] = "unknown" +        else: +            return self.async_update_reload_and_abort( +                reconfigure_entry, data_updates={CONF_HOST: updated_host} +            ) + +        return self.async_show_form( +            step_id="reconfigure", +            data_schema=user_form_schema(user_input), +            errors=errors, +        ) + class VodafoneStationOptionsFlowHandler(OptionsFlow): """Handle a option flow.""" diff --git a/homeassistant/components/vodafone_station/coordinator.py b/homeassistant/components/vodafone_station/coordinator.py index b7986d06c25..cee66bd2e7c 100644 --- a/homeassistant/components/vodafone_station/coordinator.py +++ b/homeassistant/components/vodafone_station/coordinator.py @@ -21,6 +21,8 @@ from .helpers import cleanup_device_tracker  CONSIDER_HOME_SECONDS = DEFAULT_CONSIDER_HOME.total_seconds()  +type VodafoneConfigEntry = ConfigEntry[VodafoneStationRouter] +  @dataclass(slots=True) class VodafoneStationDeviceInfo: @@ -42,7 +44,7 @@ class UpdateCoordinatorDataType: class VodafoneStationRouter(DataUpdateCoordinator[UpdateCoordinatorDataType]): """Queries router running Vodafone Station firmware.""" -    config_entry: ConfigEntry +    config_entry: VodafoneConfigEntry  def __init__( self, @@ -50,7 +52,7 @@ class VodafoneStationRouter(DataUpdateCoordinator[UpdateCoordinatorDataType]): host: str, username: str, password: str, -        config_entry: ConfigEntry, +        config_entry: VodafoneConfigEntry, ) -> None: """Initialize the scanner."""  @@ -120,14 +122,22 @@ class VodafoneStationRouter(DataUpdateCoordinator[UpdateCoordinatorDataType]): data_sensors = await self.api.get_sensor_data() await self.api.logout() except exceptions.CannotAuthenticate as err: -            raise ConfigEntryAuthFailed from err +            raise ConfigEntryAuthFailed( +                translation_domain=DOMAIN, +                translation_key="cannot_authenticate", +                translation_placeholders={"error": repr(err)}, +            ) from err except ( exceptions.CannotConnect, exceptions.AlreadyLogged, exceptions.GenericLoginError, JSONDecodeError, ) as err: -            raise UpdateFailed(f"Error fetching data: {err!r}") from err +            raise UpdateFailed( +                translation_domain=DOMAIN, +                translation_key="update_failed", +                translation_placeholders={"error": repr(err)}, +            ) from err except (ConfigEntryAuthFailed, UpdateFailed): await self.api.close() raise diff --git a/homeassistant/components/vodafone_station/device_tracker.py b/homeassistant/components/vodafone_station/device_tracker.py index ece4bd05a02..4efa26cda8c 100644 --- a/homeassistant/components/vodafone_station/device_tracker.py +++ b/homeassistant/components/vodafone_station/device_tracker.py @@ -3,25 +3,31 @@ from __future__ import annotations  from homeassistant.components.device_tracker import ScannerEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from
homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import _LOGGER, DOMAIN -from .coordinator import VodafoneStationDeviceInfo, VodafoneStationRouter +from .const import _LOGGER +from .coordinator import ( + VodafoneConfigEntry, + VodafoneStationDeviceInfo, + VodafoneStationRouter, +) + +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: VodafoneConfigEntry, async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up device tracker for Vodafone Station component.""" _LOGGER.debug("Start device trackers setup") - coordinator: VodafoneStationRouter = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data tracked: set = set() diff --git a/homeassistant/components/vodafone_station/diagnostics.py b/homeassistant/components/vodafone_station/diagnostics.py index e306d6caca2..4778e7d5a4e 100644 --- a/homeassistant/components/vodafone_station/diagnostics.py +++ b/homeassistant/components/vodafone_station/diagnostics.py @@ -5,22 +5,20 @@ from __future__ import annotations from typing import Any from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant -from .const import DOMAIN -from .coordinator import VodafoneStationRouter +from .coordinator import VodafoneConfigEntry TO_REDACT = {CONF_USERNAME, CONF_PASSWORD} async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: VodafoneConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - coordinator: VodafoneStationRouter = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data sensors_data = coordinator.data.sensors return { diff --git a/homeassistant/components/vodafone_station/manifest.json b/homeassistant/components/vodafone_station/manifest.json index 4acafc8df3a..29cb3c070ab 100644 --- a/homeassistant/components/vodafone_station/manifest.json +++ b/homeassistant/components/vodafone_station/manifest.json @@ -7,5 +7,6 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["aiovodafone"], + "quality_scale": "silver", "requirements": ["aiovodafone==0.6.1"] } diff --git a/homeassistant/components/vodafone_station/quality_scale.yaml b/homeassistant/components/vodafone_station/quality_scale.yaml new file mode 100644 index 00000000000..d60020f5e47 --- /dev/null +++ b/homeassistant/components/vodafone_station/quality_scale.yaml @@ -0,0 +1,76 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: no actions + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: no actions + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: no events + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: done + config-entry-unloading: done + docs-configuration-parameters: done + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + 
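Aside: a minimal sketch, not part of the patch above, of the typed-ConfigEntry/runtime_data pattern the vodafone_station changes migrate to. ExampleCoordinator and ExampleConfigEntry are made-up names for illustration; only ConfigEntry and HomeAssistant come from Home Assistant itself.

from dataclasses import dataclass

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant


@dataclass
class ExampleCoordinator:
    """Stand-in for a DataUpdateCoordinator holding the API handle."""

    host: str


# Parametrizing ConfigEntry types entry.runtime_data, replacing the
# hass.data[DOMAIN][entry.entry_id] lookups previously used by platforms.
type ExampleConfigEntry = ConfigEntry[ExampleCoordinator]


async def async_setup_entry(hass: HomeAssistant, entry: ExampleConfigEntry) -> bool:
    """Store the coordinator on the entry instead of in hass.data."""
    entry.runtime_data = ExampleCoordinator(host=entry.data["host"])
    return True


async def async_unload_entry(hass: HomeAssistant, entry: ExampleConfigEntry) -> bool:
    """Read the typed coordinator back from the entry on unload."""
    coordinator = entry.runtime_data  # statically typed as ExampleCoordinator
    return await hass.config_entries.async_unload_platforms(entry, [])  # platform list omitted in this sketch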
log-when-unavailable: done + parallel-updates: done + reauthentication-flow: done + test-coverage: done + + # Gold + devices: done + diagnostics: done + discovery-update-info: + status: exempt + comment: device not discoverable + discovery: + status: exempt + comment: device not discoverable + docs-data-update: done + docs-examples: done + docs-known-limitations: + status: exempt + comment: no known limitations, yet + docs-supported-devices: done + docs-supported-functions: done + docs-troubleshooting: done + docs-use-cases: done + dynamic-devices: done + entity-category: done + entity-device-class: done + entity-disabled-by-default: + status: exempt + comment: no known use case + entity-translations: done + exception-translations: done + icon-translations: done + reconfiguration-flow: done + repair-issues: + status: exempt + comment: no known use cases for repair issues or flows, yet + stale-devices: done + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/vodafone_station/sensor.py b/homeassistant/components/vodafone_station/sensor.py index d29fb7f21e9..2573864330d 100644 --- a/homeassistant/components/vodafone_station/sensor.py +++ b/homeassistant/components/vodafone_station/sensor.py @@ -12,14 +12,16 @@ from homeassistant.components.sensor import ( SensorEntity, SensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import PERCENTAGE, EntityCategory, UnitOfDataRate from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import _LOGGER, DOMAIN, LINE_TYPES -from .coordinator import VodafoneStationRouter +from .const import _LOGGER, LINE_TYPES +from .coordinator import VodafoneConfigEntry, VodafoneStationRouter + +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 NOT_AVAILABLE: list = ["", "N/A", "0.0.0.0"] UPTIME_DEVIATION = 60 @@ -166,13 +168,13 @@ SENSOR_TYPES: Final = ( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: VodafoneConfigEntry, async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up entry.""" _LOGGER.debug("Setting up Vodafone Station sensors") - coordinator: VodafoneStationRouter = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data sensors_data = coordinator.data.sensors diff --git a/homeassistant/components/vodafone_station/strings.json b/homeassistant/components/vodafone_station/strings.json index 8910d7178b7..958b774a485 100644 --- a/homeassistant/components/vodafone_station/strings.json +++ b/homeassistant/components/vodafone_station/strings.json @@ -3,9 +3,11 @@ "flow_title": "{host}", "step": { "reauth_confirm": { - "description": "Please enter the correct password for host: {host}", "data": { "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "password": "[%key:component::vodafone_station::config::step::user::data_description::password%]" } }, "user": { @@ -15,7 +17,21 @@ "password": "[%key:common::config_flow::data::password%]" }, "data_description": { - "host": "The hostname or IP address of your Vodafone Station." + "host": "The hostname or IP address of your Vodafone Station.", + "username": "The username for your Vodafone Station.", + "password": "The password for your Vodafone Station." 
+ } + }, + "reconfigure": { + "data": { + "host": "[%key:common::config_flow::data::host%]", + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "host": "[%key:component::vodafone_station::config::step::user::data_description::host%]", + "username": "[%key:component::vodafone_station::config::step::user::data_description::username%]", + "password": "[%key:component::vodafone_station::config::step::user::data_description::password%]" } } }, @@ -23,16 +39,17 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", "already_logged": "User already logged-in, please try again later.", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", "model_not_supported": "The device model is currently unsupported.", "unknown": "[%key:common::config_flow::error::unknown%]" }, "error": { - "already_logged": "User already logged-in, please try again later.", + "already_logged": "[%key:component::vodafone_station::config::abort::already_logged%]", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", - "model_not_supported": "The device model is currently unsupported.", + "model_not_supported": "[%key:component::vodafone_station::config::abort::model_not_supported%]", "unknown": "[%key:common::config_flow::error::unknown%]" } }, @@ -41,20 +58,35 @@ "init": { "data": { "consider_home": "Seconds to consider a device at 'home'" + }, + "data_description": { + "consider_home": "The number of seconds to wait until marking a device as not home after it disconnects from the network." 
} } } }, "entity": { "button": { - "dsl_reconnect": { "name": "DSL reconnect" }, - "fiber_reconnect": { "name": "Fiber reconnect" }, - "internet_key_reconnect": { "name": "Internet key reconnect" } + "dsl_reconnect": { + "name": "DSL reconnect" + }, + "fiber_reconnect": { + "name": "Fiber reconnect" + }, + "internet_key_reconnect": { + "name": "Internet key reconnect" + } }, "sensor": { - "external_ipv4": { "name": "WAN IPv4 address" }, - "external_ipv6": { "name": "WAN IPv6 address" }, - "external_ip_key": { "name": "WAN internet key address" }, + "external_ipv4": { + "name": "WAN IPv4 address" + }, + "external_ipv6": { + "name": "WAN IPv6 address" + }, + "external_ip_key": { + "name": "WAN internet key address" + }, "active_connection": { "name": "Active connection", "state": { @@ -64,15 +96,44 @@ "internet_key": "Internet key" } }, - "down_stream": { "name": "WAN download rate" }, - "up_stream": { "name": "WAN upload rate" }, - "fw_version": { "name": "Firmware version" }, - "phone_num1": { "name": "Phone number (1)" }, - "phone_num2": { "name": "Phone number (2)" }, - "sys_uptime": { "name": "Uptime" }, - "sys_cpu_usage": { "name": "CPU usage" }, - "sys_memory_usage": { "name": "Memory usage" }, - "sys_reboot_cause": { "name": "Reboot cause" } + "down_stream": { + "name": "WAN download rate" + }, + "up_stream": { + "name": "WAN upload rate" + }, + "fw_version": { + "name": "Firmware version" + }, + "phone_num1": { + "name": "Phone number (1)" + }, + "phone_num2": { + "name": "Phone number (2)" + }, + "sys_uptime": { + "name": "Uptime" + }, + "sys_cpu_usage": { + "name": "CPU usage" + }, + "sys_memory_usage": { + "name": "Memory usage" + }, + "sys_reboot_cause": { + "name": "Reboot cause" + } + } + }, + "exceptions": { + "update_failed": { + "message": "Error fetching data: {error}" + }, + "cannot_execute_action": { + "message": "Cannot execute requested action: {error}" + }, + "cannot_authenticate": { + "message": "Error authenticating: {error}" } } } diff --git a/homeassistant/components/voip/manifest.json b/homeassistant/components/voip/manifest.json index 1e4c249c720..dfd397fde14 100644 --- a/homeassistant/components/voip/manifest.json +++ b/homeassistant/components/voip/manifest.json @@ -6,6 +6,7 @@ "dependencies": ["assist_pipeline", "assist_satellite", "intent", "network"], "documentation": "https://www.home-assistant.io/integrations/voip", "iot_class": "local_push", + "loggers": ["voip_utils"], "quality_scale": "internal", "requirements": ["voip-utils==0.3.1"] } diff --git a/homeassistant/components/wallbox/number.py b/homeassistant/components/wallbox/number.py index 462266636d7..a5880f6e0f7 100644 --- a/homeassistant/components/wallbox/number.py +++ b/homeassistant/components/wallbox/number.py @@ -71,9 +71,7 @@ NUMBER_TYPES: dict[str, WallboxNumberEntityDescription] = { CHARGER_MAX_ICP_CURRENT_KEY: WallboxNumberEntityDescription( key=CHARGER_MAX_ICP_CURRENT_KEY, translation_key="maximum_icp_current", - max_value_fn=lambda coordinator: cast( - float, coordinator.data[CHARGER_MAX_AVAILABLE_POWER_KEY] - ), + max_value_fn=lambda _: 255, min_value_fn=lambda _: 6, set_value_fn=lambda coordinator: coordinator.async_set_icp_current, native_step=1, diff --git a/homeassistant/components/watergate/__init__.py b/homeassistant/components/watergate/__init__.py index c1747af1f11..fd591215d8b 100644 --- a/homeassistant/components/watergate/__init__.py +++ b/homeassistant/components/watergate/__init__.py @@ -18,8 +18,9 @@ from homeassistant.components.webhook import ( ) from homeassistant.const 
import CONF_IP_ADDRESS, CONF_WEBHOOK_ID, Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers.dispatcher import async_dispatcher_send -from .const import DOMAIN +from .const import AUTO_SHUT_OFF_EVENT_NAME, DOMAIN from .coordinator import WatergateConfigEntry, WatergateDataCoordinator _LOGGER = logging.getLogger(__name__) @@ -28,8 +29,10 @@ WEBHOOK_TELEMETRY_TYPE = "telemetry" WEBHOOK_VALVE_TYPE = "valve" WEBHOOK_WIFI_CHANGED_TYPE = "wifi-changed" WEBHOOK_POWER_SUPPLY_CHANGED_TYPE = "power-supply-changed" +WEBHOOK_AUTO_SHUT_OFF = "auto-shut-off-report" PLATFORMS: list[Platform] = [ + Platform.EVENT, Platform.SENSOR, Platform.VALVE, ] @@ -120,6 +123,10 @@ def get_webhook_handler( coordinator_data.networking.rssi = data.rssi elif body_type == WEBHOOK_POWER_SUPPLY_CHANGED_TYPE: coordinator_data.state.power_supply = data.supply + elif body_type == WEBHOOK_AUTO_SHUT_OFF: + async_dispatcher_send( + hass, AUTO_SHUT_OFF_EVENT_NAME.format(data.type.lower()), data + ) coordinator.async_set_updated_data(coordinator_data) diff --git a/homeassistant/components/watergate/const.py b/homeassistant/components/watergate/const.py index 22a14330af9..c6726d9185f 100644 --- a/homeassistant/components/watergate/const.py +++ b/homeassistant/components/watergate/const.py @@ -3,3 +3,5 @@ DOMAIN = "watergate" MANUFACTURER = "Watergate" + +AUTO_SHUT_OFF_EVENT_NAME = "watergate_{}" diff --git a/homeassistant/components/watergate/event.py b/homeassistant/components/watergate/event.py new file mode 100644 index 00000000000..cf2447df4b3 --- /dev/null +++ b/homeassistant/components/watergate/event.py @@ -0,0 +1,78 @@ +"""Module contains the AutoShutOffEvent class for handling auto shut off events.""" + +from watergate_local_api.models.auto_shut_off_report import AutoShutOffReport + +from homeassistant.components.event import EventEntity, EventEntityDescription +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . 
import WatergateConfigEntry +from .const import AUTO_SHUT_OFF_EVENT_NAME +from .coordinator import WatergateDataCoordinator +from .entity import WatergateEntity + +VOLUME_AUTO_SHUT_OFF = "volume_threshold" +DURATION_AUTO_SHUT_OFF = "duration_threshold" + + +DESCRIPTIONS: list[EventEntityDescription] = [ + EventEntityDescription( + translation_key="auto_shut_off_volume", + key="auto_shut_off_volume", + event_types=[VOLUME_AUTO_SHUT_OFF], + ), + EventEntityDescription( + translation_key="auto_shut_off_duration", + key="auto_shut_off_duration", + event_types=[DURATION_AUTO_SHUT_OFF], + ), +] + +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: WatergateConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up Event entities from config entry.""" + + coordinator = config_entry.runtime_data + + async_add_entities( + AutoShutOffEvent(coordinator, description) for description in DESCRIPTIONS + ) + + +class AutoShutOffEvent(WatergateEntity, EventEntity): + """Event for Auto Shut Off.""" + + def __init__( + self, + coordinator: WatergateDataCoordinator, + entity_description: EventEntityDescription, + ) -> None: + """Initialize Auto Shut Off Entity.""" + super().__init__(coordinator, entity_description.key) + self.entity_description = entity_description + + async def async_added_to_hass(self): + """Register the callback for event handling when the entity is added.""" + await super().async_added_to_hass() + self.async_on_remove( + async_dispatcher_connect( + self.hass, + AUTO_SHUT_OFF_EVENT_NAME.format(self.event_types[0]), + self._async_handle_event, + ) + ) + + @callback + def _async_handle_event(self, event: AutoShutOffReport) -> None: + self._trigger_event( + event.type.lower(), + {"volume": event.volume, "duration": event.duration}, + ) + self.async_write_ha_state() diff --git a/homeassistant/components/watergate/icons.json b/homeassistant/components/watergate/icons.json new file mode 100644 index 00000000000..28a0bfbc825 --- /dev/null +++ b/homeassistant/components/watergate/icons.json @@ -0,0 +1,12 @@ +{ + "entity": { + "event": { + "auto_shut_off_volume": { + "default": "mdi:water" + }, + "auto_shut_off_duration": { + "default": "mdi:timelapse" + } + } + } +} diff --git a/homeassistant/components/watergate/quality_scale.yaml b/homeassistant/components/watergate/quality_scale.yaml index b116eff970e..73a39bd5264 100644 --- a/homeassistant/components/watergate/quality_scale.yaml +++ b/homeassistant/components/watergate/quality_scale.yaml @@ -17,10 +17,7 @@ rules: docs-high-level-description: done docs-installation-instructions: done docs-removal-instructions: done - entity-event-setup: - status: exempt - comment: | - Entities of this integration does not explicitly subscribe to events. 
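Aside: a minimal sketch, not part of the patch above, of the dispatcher signal pattern the watergate webhook handler and event entities rely on. The signal name and payload shape here are hypothetical.

from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import (
    async_dispatcher_connect,
    async_dispatcher_send,
)

EXAMPLE_SIGNAL = "example_auto_shut_off_volume_threshold"


@callback
def _handle_report(report: dict) -> None:
    """Run for every payload sent on EXAMPLE_SIGNAL (in the event loop)."""
    print(report["type"], report["volume"], report["duration"])


def subscribe(hass: HomeAssistant) -> None:
    """Connect the handler; an entity would hand the unsubscribe callable to async_on_remove."""
    unsubscribe = async_dispatcher_connect(hass, EXAMPLE_SIGNAL, _handle_report)
    # keep `unsubscribe` and call it when the listener should go away


def publish(hass: HomeAssistant) -> None:
    """Send a payload to every connected handler, e.g. from a webhook handler running in the loop."""
    async_dispatcher_send(
        hass, EXAMPLE_SIGNAL, {"type": "volume_threshold", "volume": 12, "duration": 0}
    )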
+ entity-event-setup: done entity-unique-id: done has-entity-name: done runtime-data: done diff --git a/homeassistant/components/watergate/strings.json b/homeassistant/components/watergate/strings.json index c312525e420..634e05e7973 100644 --- a/homeassistant/components/watergate/strings.json +++ b/homeassistant/components/watergate/strings.json @@ -19,6 +19,42 @@ } }, "entity": { + "event": { + "auto_shut_off_volume": { + "name": "Volume auto shut-off", + "state_attributes": { + "event_type": { + "state": { + "volume_threshold": "Volume", + "duration_threshold": "Duration" + } + }, + "volume": { + "name": "[%key:component::watergate::entity::event::auto_shut_off_volume::state_attributes::event_type::state::volume_threshold%]" + }, + "duration": { + "name": "[%key:component::watergate::entity::event::auto_shut_off_volume::state_attributes::event_type::state::duration_threshold%]" + } + } + }, + "auto_shut_off_duration": { + "name": "Duration auto shut-off", + "state_attributes": { + "event_type": { + "state": { + "volume_threshold": "[%key:component::watergate::entity::event::auto_shut_off_volume::state_attributes::event_type::state::volume_threshold%]", + "duration_threshold": "[%key:component::watergate::entity::event::auto_shut_off_volume::state_attributes::event_type::state::duration_threshold%]" + } + }, + "volume": { + "name": "[%key:component::watergate::entity::event::auto_shut_off_volume::state_attributes::event_type::state::volume_threshold%]" + }, + "duration": { + "name": "[%key:component::watergate::entity::event::auto_shut_off_volume::state_attributes::event_type::state::duration_threshold%]" + } + } + } + }, "sensor": { "water_meter_volume": { "name": "Water meter volume" diff --git a/homeassistant/components/weatherflow/sensor.py b/homeassistant/components/weatherflow/sensor.py index 8eee472fe5c..10c04b3283b 100644 --- a/homeassistant/components/weatherflow/sensor.py +++ b/homeassistant/components/weatherflow/sensor.py @@ -268,6 +268,7 @@ SENSORS: tuple[WeatherFlowSensorEntityDescription, ...] 
= ( key="wind_direction", translation_key="wind_direction", device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, native_unit_of_measurement=DEGREE, event_subscriptions=[EVENT_RAPID_WIND, EVENT_OBSERVATION], raw_data_conv_fn=lambda raw_data: raw_data.magnitude, diff --git a/homeassistant/components/webdav/__init__.py b/homeassistant/components/webdav/__init__.py index 952a68d829f..36a03dce4d7 100644 --- a/homeassistant/components/webdav/__init__.py +++ b/homeassistant/components/webdav/__init__.py @@ -13,7 +13,11 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady from .const import CONF_BACKUP_PATH, DATA_BACKUP_AGENT_LISTENERS, DOMAIN -from .helpers import async_create_client, async_ensure_path_exists +from .helpers import ( + async_create_client, + async_ensure_path_exists, + async_migrate_wrong_folder_path, +) type WebDavConfigEntry = ConfigEntry[Client] @@ -46,10 +50,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: WebDavConfigEntry) -> bo translation_key="cannot_connect", ) + path = entry.data.get(CONF_BACKUP_PATH, "/") + await async_migrate_wrong_folder_path(client, path) + # Ensure the backup directory exists - if not await async_ensure_path_exists( - client, entry.data.get(CONF_BACKUP_PATH, "/") - ): + if not await async_ensure_path_exists(client, path): raise ConfigEntryNotReady( translation_domain=DOMAIN, translation_key="cannot_access_or_create_backup_path", diff --git a/homeassistant/components/webdav/backup.py b/homeassistant/components/webdav/backup.py index 321ed98bfa8..fb2927a58bb 100644 --- a/homeassistant/components/webdav/backup.py +++ b/homeassistant/components/webdav/backup.py @@ -5,10 +5,10 @@ from __future__ import annotations from collections.abc import AsyncIterator, Callable, Coroutine from functools import wraps import logging +from time import time from typing import Any, Concatenate from aiohttp import ClientTimeout -from aiowebdav2 import Property, PropertyRequest from aiowebdav2.exceptions import UnauthorizedError, WebDavError from propcache.api import cached_property @@ -28,9 +28,8 @@ from .const import CONF_BACKUP_PATH, DATA_BACKUP_AGENT_LISTENERS, DOMAIN _LOGGER = logging.getLogger(__name__) -METADATA_VERSION = "1" BACKUP_TIMEOUT = ClientTimeout(connect=10, total=43200) -NAMESPACE = "https://home-assistant.io" +CACHE_TTL = 300 async def async_get_backup_agents( @@ -96,23 +95,6 @@ def suggested_filenames(backup: AgentBackup) -> tuple[str, str]: return f"{base_name}.tar", f"{base_name}.metadata.json" -def _is_current_metadata_version(properties: list[Property]) -> bool: - """Check if any property is of the current metadata version.""" - return any( - prop.value == METADATA_VERSION - for prop in properties - if prop.namespace == NAMESPACE and prop.name == "metadata_version" - ) - - -def _backup_id_from_properties(properties: list[Property]) -> str | None: - """Return the backup ID from properties.""" - for prop in properties: - if prop.namespace == NAMESPACE and prop.name == "backup_id": - return prop.value - return None - - class WebDavBackupAgent(BackupAgent): """Backup agent interface.""" @@ -126,6 +108,8 @@ class WebDavBackupAgent(BackupAgent): self._client = entry.runtime_data self.name = entry.title self.unique_id = entry.entry_id + self._cache_metadata_files: dict[str, AgentBackup] = {} + self._cache_expiration = time() @cached_property def _backup_path(self) -> str: @@ -182,27 +166,14 @@ class WebDavBackupAgent(BackupAgent): 
f"{self._backup_path}/{filename_meta}", ) - await self._client.set_property_batch( - f"{self._backup_path}/{filename_meta}", - [ - Property( - namespace=NAMESPACE, - name="backup_id", - value=backup.backup_id, - ), - Property( - namespace=NAMESPACE, - name="metadata_version", - value=METADATA_VERSION, - ), - ], - ) - _LOGGER.debug( "Uploaded metadata file for %s", f"{self._backup_path}/{filename_meta}", ) + # reset cache + self._cache_expiration = time() + @handle_backup_errors async def async_delete_backup( self, @@ -226,14 +197,13 @@ class WebDavBackupAgent(BackupAgent): backup_path, ) + # reset cache + self._cache_expiration = time() + @handle_backup_errors async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]: """List backups.""" - metadata_files = await self._list_metadata_files() - return [ - await self._download_metadata(metadata_file) - for metadata_file in metadata_files.values() - ] + return list((await self._list_cached_metadata_files()).values()) @handle_backup_errors async def async_get_backup( @@ -244,38 +214,35 @@ class WebDavBackupAgent(BackupAgent): """Return a backup.""" return await self._find_backup_by_id(backup_id) - async def _list_metadata_files(self) -> dict[str, str]: - """List metadata files.""" - files = await self._client.list_with_properties( - self._backup_path, - [ - PropertyRequest( - namespace=NAMESPACE, - name="metadata_version", - ), - PropertyRequest( - namespace=NAMESPACE, - name="backup_id", - ), - ], - ) - return { - backup_id: file_name - for file_name, properties in files.items() - if file_name.endswith(".json") and _is_current_metadata_version(properties) - if (backup_id := _backup_id_from_properties(properties)) - } + async def _list_cached_metadata_files(self) -> dict[str, AgentBackup]: + """List metadata files with a cache.""" + if time() <= self._cache_expiration: + return self._cache_metadata_files + + async def _download_metadata(path: str) -> AgentBackup: + """Download metadata file.""" + iterator = await self._client.download_iter(path) + metadata = await anext(iterator) + return AgentBackup.from_dict(json_loads_object(metadata)) + + async def _list_metadata_files() -> dict[str, AgentBackup]: + """List metadata files.""" + files = await self._client.list_files(self._backup_path) + return { + metadata_content.backup_id: metadata_content + for file_name in files + if file_name.endswith(".json") + if (metadata_content := await _download_metadata(file_name)) + } + + self._cache_metadata_files = await _list_metadata_files() + self._cache_expiration = time() + CACHE_TTL + return self._cache_metadata_files async def _find_backup_by_id(self, backup_id: str) -> AgentBackup: """Find a backup by its backup ID on remote.""" - metadata_files = await self._list_metadata_files() + metadata_files = await self._list_cached_metadata_files() if metadata_file := metadata_files.get(backup_id): - return await self._download_metadata(metadata_file) + return metadata_file raise BackupNotFound(f"Backup {backup_id} not found") - - async def _download_metadata(self, path: str) -> AgentBackup: - """Download metadata file.""" - iterator = await self._client.download_iter(path) - metadata = await anext(iterator) - return AgentBackup.from_dict(json_loads_object(metadata)) diff --git a/homeassistant/components/webdav/config_flow.py b/homeassistant/components/webdav/config_flow.py index f75544d25ad..e3e46d2575a 100644 --- a/homeassistant/components/webdav/config_flow.py +++ b/homeassistant/components/webdav/config_flow.py @@ -5,7 +5,7 @@ from __future__ 
import annotations import logging from typing import Any -from aiowebdav2.exceptions import UnauthorizedError +from aiowebdav2.exceptions import MethodNotSupportedError, UnauthorizedError import voluptuous as vol import yarl @@ -65,7 +65,9 @@ class WebDavConfigFlow(ConfigFlow, domain=DOMAIN): result = await client.check() except UnauthorizedError: errors["base"] = "invalid_auth" - except Exception: # pylint: disable=broad-except + except MethodNotSupportedError: + errors["base"] = "invalid_method" + except Exception: _LOGGER.exception("Unexpected error") errors["base"] = "unknown" else: diff --git a/homeassistant/components/webdav/helpers.py b/homeassistant/components/webdav/helpers.py index 9f91ed3bdb3..442f69b4d3c 100644 --- a/homeassistant/components/webdav/helpers.py +++ b/homeassistant/components/webdav/helpers.py @@ -1,10 +1,18 @@ """Helper functions for the WebDAV component.""" +import logging + from aiowebdav2.client import Client, ClientOptions +from aiowebdav2.exceptions import WebDavError from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + @callback def async_create_client( @@ -36,3 +44,25 @@ async def async_ensure_path_exists(client: Client, path: str) -> bool: return False return True + + +async def async_migrate_wrong_folder_path(client: Client, path: str) -> None: + """Migrate the wrong encoded folder path to the correct one.""" + wrong_path = path.replace(" ", "%20") + # migrate folder when the old folder exists + if wrong_path != path and await client.check(wrong_path): + try: + await client.move(wrong_path, path) + except WebDavError as err: + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="failed_to_migrate_folder", + translation_placeholders={ + "wrong_path": wrong_path, + "correct_path": path, + }, + ) from err + + _LOGGER.debug( + "Migrated wrong encoded folder path from %s to %s", wrong_path, path + ) diff --git a/homeassistant/components/webdav/manifest.json b/homeassistant/components/webdav/manifest.json index fd3c749781e..30028cb28c9 100644 --- a/homeassistant/components/webdav/manifest.json +++ b/homeassistant/components/webdav/manifest.json @@ -8,5 +8,5 @@ "iot_class": "cloud_polling", "loggers": ["aiowebdav2"], "quality_scale": "bronze", - "requirements": ["aiowebdav2==0.4.1"] + "requirements": ["aiowebdav2==0.4.2"] } diff --git a/homeassistant/components/webdav/strings.json b/homeassistant/components/webdav/strings.json index 57117cdd9de..ac6418f1239 100644 --- a/homeassistant/components/webdav/strings.json +++ b/homeassistant/components/webdav/strings.json @@ -21,6 +21,7 @@ "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "invalid_method": "The server does not support the required methods. Please check whether you have the correct URL. Check with your provider for the correct URL.", "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { @@ -36,6 +37,9 @@ }, "cannot_access_or_create_backup_path": { "message": "Cannot access or create backup path. Please check the path and permissions." + }, + "failed_to_migrate_folder": { + "message": "Failed to migrate wrong encoded folder \"{wrong_path}\" to \"{correct_path}\"." 
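Aside: a minimal sketch, not part of the patch above, of the time-based cache the WebDAV backup agent introduces, reduced to its core. The TtlCache class and its methods are made up; only the CACHE_TTL idea mirrors the change.

from collections.abc import Awaitable, Callable
from time import time

CACHE_TTL = 300  # seconds


class TtlCache:
    """Cache an expensive async listing and refresh it after CACHE_TTL seconds."""

    def __init__(self) -> None:
        self._items: dict[str, str] = {}
        self._expiration = time()  # starts expired so the first read loads

    async def get(
        self, loader: Callable[[], Awaitable[dict[str, str]]]
    ) -> dict[str, str]:
        """Return cached items, calling loader() only when the TTL has elapsed."""
        if time() > self._expiration:
            self._items = await loader()
            self._expiration = time() + CACHE_TTL
        return self._items

    def invalidate(self) -> None:
        """Expire the cache, e.g. after an upload or delete changed the remote state."""
        self._expiration = time()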
} } } diff --git a/homeassistant/components/webmin/strings.json b/homeassistant/components/webmin/strings.json index 9a6d6d4fbe4..b92986f917a 100644 --- a/homeassistant/components/webmin/strings.json +++ b/homeassistant/components/webmin/strings.json @@ -29,13 +29,13 @@ "entity": { "sensor": { "load_1m": { - "name": "Load (1m)" + "name": "Load (1 min)" }, "load_5m": { - "name": "Load (5m)" + "name": "Load (5 min)" }, "load_15m": { - "name": "Load (15m)" + "name": "Load (15 min)" }, "mem_total": { "name": "Memory total" diff --git a/homeassistant/components/whois/config_flow.py b/homeassistant/components/whois/config_flow.py index cb4326d996d..a8306be7632 100644 --- a/homeassistant/components/whois/config_flow.py +++ b/homeassistant/components/whois/config_flow.py @@ -11,6 +11,8 @@ from whois.exceptions import ( UnknownDateFormat, UnknownTld, WhoisCommandFailed, + WhoisPrivateRegistry, + WhoisQuotaExceeded, ) from homeassistant.config_entries import ConfigFlow, ConfigFlowResult @@ -48,6 +50,10 @@ class WhoisFlowHandler(ConfigFlow, domain=DOMAIN): errors["base"] = "unexpected_response" except UnknownDateFormat: errors["base"] = "unknown_date_format" + except WhoisPrivateRegistry: + errors["base"] = "private_registry" + except WhoisQuotaExceeded: + errors["base"] = "quota_exceeded" else: return self.async_create_entry( title=self.imported_name or user_input[CONF_DOMAIN], diff --git a/homeassistant/components/whois/strings.json b/homeassistant/components/whois/strings.json index c28c079784d..3b0f9dfd4d1 100644 --- a/homeassistant/components/whois/strings.json +++ b/homeassistant/components/whois/strings.json @@ -11,7 +11,9 @@ "unexpected_response": "Unexpected response from whois server", "unknown_date_format": "Unknown date format in whois server response", "unknown_tld": "The given TLD is unknown or not available to this integration", - "whois_command_failed": "Whois command failed: could not retrieve whois information" + "whois_command_failed": "Whois command failed: could not retrieve whois information", + "private_registry": "The given domain is registered in a private registry and cannot be monitored", + "quota_exceeded": "Your whois quota has been exceeded for this TLD" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" diff --git a/homeassistant/components/withings/sensor.py b/homeassistant/components/withings/sensor.py index 28a0fbd1492..f20145f8bf9 100644 --- a/homeassistant/components/withings/sensor.py +++ b/homeassistant/components/withings/sensor.py @@ -122,7 +122,7 @@ MEASUREMENT_SENSORS: dict[ measurement_type=MeasurementType.HEIGHT, translation_key="height", native_unit_of_measurement=UnitOfLength.METERS, - suggested_display_precision=1, + suggested_display_precision=2, device_class=SensorDeviceClass.DISTANCE, state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, @@ -326,6 +326,7 @@ SLEEP_SENSORS = [ value_fn=lambda sleep_summary: sleep_summary.deep_sleep_duration, translation_key="deep_sleep", native_unit_of_measurement=UnitOfTime.SECONDS, + suggested_unit_of_measurement=UnitOfTime.HOURS, device_class=SensorDeviceClass.DURATION, state_class=SensorStateClass.MEASUREMENT, ), @@ -334,6 +335,7 @@ SLEEP_SENSORS = [ value_fn=lambda sleep_summary: sleep_summary.sleep_latency, translation_key="time_to_sleep", native_unit_of_measurement=UnitOfTime.SECONDS, + suggested_unit_of_measurement=UnitOfTime.HOURS, device_class=SensorDeviceClass.DURATION, state_class=SensorStateClass.MEASUREMENT, 
entity_registry_enabled_default=False, @@ -343,6 +345,7 @@ SLEEP_SENSORS = [ value_fn=lambda sleep_summary: sleep_summary.wake_up_latency, translation_key="time_to_wakeup", native_unit_of_measurement=UnitOfTime.SECONDS, + suggested_unit_of_measurement=UnitOfTime.HOURS, device_class=SensorDeviceClass.DURATION, state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, @@ -376,6 +379,7 @@ SLEEP_SENSORS = [ value_fn=lambda sleep_summary: sleep_summary.light_sleep_duration, translation_key="light_sleep", native_unit_of_measurement=UnitOfTime.SECONDS, + suggested_unit_of_measurement=UnitOfTime.HOURS, device_class=SensorDeviceClass.DURATION, state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, @@ -385,6 +389,7 @@ SLEEP_SENSORS = [ value_fn=lambda sleep_summary: sleep_summary.rem_sleep_duration, translation_key="rem_sleep", native_unit_of_measurement=UnitOfTime.SECONDS, + suggested_unit_of_measurement=UnitOfTime.HOURS, device_class=SensorDeviceClass.DURATION, state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, @@ -451,6 +456,7 @@ SLEEP_SENSORS = [ value_fn=lambda sleep_summary: sleep_summary.total_time_awake, translation_key="wakeup_time", native_unit_of_measurement=UnitOfTime.SECONDS, + suggested_unit_of_measurement=UnitOfTime.HOURS, device_class=SensorDeviceClass.DURATION, state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, diff --git a/homeassistant/components/wolflink/manifest.json b/homeassistant/components/wolflink/manifest.json index 964d192d279..5f3a6366fe1 100644 --- a/homeassistant/components/wolflink/manifest.json +++ b/homeassistant/components/wolflink/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/wolflink", "iot_class": "cloud_polling", "loggers": ["wolf_comm"], - "requirements": ["wolf-comm==0.0.19"] + "requirements": ["wolf-comm==0.0.23"] } diff --git a/homeassistant/components/wolflink/sensor.py b/homeassistant/components/wolflink/sensor.py index cf6d712dd0d..9380c28de89 100644 --- a/homeassistant/components/wolflink/sensor.py +++ b/homeassistant/components/wolflink/sensor.py @@ -2,19 +2,42 @@ from __future__ import annotations +from collections.abc import Callable +from dataclasses import dataclass + from wolf_comm.models import ( + EnergyParameter, + FlowParameter, + FrequencyParameter, HoursParameter, ListItemParameter, Parameter, PercentageParameter, + PowerParameter, Pressure, + RPMParameter, SimpleParameter, Temperature, ) -from homeassistant.components.sensor import SensorDeviceClass, SensorEntity +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import UnitOfPressure, UnitOfTemperature, UnitOfTime +from homeassistant.const import ( + PERCENTAGE, + REVOLUTIONS_PER_MINUTE, + UnitOfEnergy, + UnitOfFrequency, + UnitOfPower, + UnitOfPressure, + UnitOfTemperature, + UnitOfTime, + UnitOfVolumeFlowRate, +) from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback @@ -23,31 +46,106 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import COORDINATOR, DEVICE_ID, DOMAIN, MANUFACTURER, PARAMETERS, STATES +def get_listitem_resolve_state(wolf_object, state): + """Resolve list item state.""" + resolved_state = [item 
for item in wolf_object.items if item.value == int(state)] + if resolved_state: + resolved_name = resolved_state[0].name + state = STATES.get(resolved_name, resolved_name) + return state + + +@dataclass(kw_only=True, frozen=True) +class WolflinkSensorEntityDescription(SensorEntityDescription): + """Describes Wolflink sensor entity.""" + + value_fn: Callable[[Parameter, str], str | None] = lambda param, value: value + supported_fn: Callable[[Parameter], bool] + + +SENSOR_DESCRIPTIONS = [ + WolflinkSensorEntityDescription( + key="temperature", + device_class=SensorDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + supported_fn=lambda param: isinstance(param, Temperature), + ), + WolflinkSensorEntityDescription( + key="pressure", + device_class=SensorDeviceClass.PRESSURE, + native_unit_of_measurement=UnitOfPressure.BAR, + supported_fn=lambda param: isinstance(param, Pressure), + ), + WolflinkSensorEntityDescription( + key="energy", + device_class=SensorDeviceClass.ENERGY, + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + supported_fn=lambda param: isinstance(param, EnergyParameter), + ), + WolflinkSensorEntityDescription( + key="power", + device_class=SensorDeviceClass.POWER, + native_unit_of_measurement=UnitOfPower.KILO_WATT, + supported_fn=lambda param: isinstance(param, PowerParameter), + ), + WolflinkSensorEntityDescription( + key="percentage", + native_unit_of_measurement=PERCENTAGE, + supported_fn=lambda param: isinstance(param, PercentageParameter), + ), + WolflinkSensorEntityDescription( + key="list_item", + translation_key="state", + supported_fn=lambda param: isinstance(param, ListItemParameter), + value_fn=get_listitem_resolve_state, + ), + WolflinkSensorEntityDescription( + key="hours", + icon="mdi:clock", + native_unit_of_measurement=UnitOfTime.HOURS, + supported_fn=lambda param: isinstance(param, HoursParameter), + ), + WolflinkSensorEntityDescription( + key="flow", + device_class=SensorDeviceClass.VOLUME_FLOW_RATE, + native_unit_of_measurement=UnitOfVolumeFlowRate.LITERS_PER_MINUTE, + supported_fn=lambda param: isinstance(param, FlowParameter), + ), + WolflinkSensorEntityDescription( + key="frequency", + device_class=SensorDeviceClass.FREQUENCY, + native_unit_of_measurement=UnitOfFrequency.HERTZ, + supported_fn=lambda param: isinstance(param, FrequencyParameter), + ), + WolflinkSensorEntityDescription( + key="rpm", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=REVOLUTIONS_PER_MINUTE, + supported_fn=lambda param: isinstance(param, RPMParameter), + ), + WolflinkSensorEntityDescription( + key="default", + supported_fn=lambda param: isinstance(param, SimpleParameter), + ), +] + + async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up all entries for Wolf Platform.""" - coordinator = hass.data[DOMAIN][config_entry.entry_id][COORDINATOR] parameters = hass.data[DOMAIN][config_entry.entry_id][PARAMETERS] device_id = hass.data[DOMAIN][config_entry.entry_id][DEVICE_ID] - entities: list[WolfLinkSensor] = [] - for parameter in parameters: - if isinstance(parameter, Temperature): - entities.append(WolfLinkTemperature(coordinator, parameter, device_id)) - if isinstance(parameter, Pressure): - entities.append(WolfLinkPressure(coordinator, parameter, device_id)) - if isinstance(parameter, PercentageParameter): - entities.append(WolfLinkPercentage(coordinator, parameter, device_id)) - if isinstance(parameter, ListItemParameter): - 
entities.append(WolfLinkState(coordinator, parameter, device_id)) - if isinstance(parameter, HoursParameter): - entities.append(WolfLinkHours(coordinator, parameter, device_id)) - if isinstance(parameter, SimpleParameter): - entities.append(WolfLinkSensor(coordinator, parameter, device_id)) + entities: list[WolfLinkSensor] = [ + WolfLinkSensor(coordinator, parameter, device_id, description) + for parameter in parameters + for description in SENSOR_DESCRIPTIONS + if description.supported_fn(parameter) + ] async_add_entities(entities, True) @@ -55,9 +153,18 @@ async def async_setup_entry( class WolfLinkSensor(CoordinatorEntity, SensorEntity): """Base class for all Wolf entities.""" - def __init__(self, coordinator, wolf_object: Parameter, device_id) -> None: + entity_description: WolflinkSensorEntityDescription + + def __init__( + self, + coordinator, + wolf_object: Parameter, + device_id: str, + description: WolflinkSensorEntityDescription, + ) -> None: """Initialize.""" super().__init__(coordinator) + self.entity_description = description self.wolf_object = wolf_object self._attr_name = wolf_object.name self._attr_unique_id = f"{device_id}:{wolf_object.parameter_id}" @@ -69,68 +176,26 @@ class WolfLinkSensor(CoordinatorEntity, SensorEntity): ) @property - def native_value(self): + def native_value(self) -> str | None: """Return the state. Wolf Client is returning only changed values so we need to store old value here.""" if self.wolf_object.parameter_id in self.coordinator.data: new_state = self.coordinator.data[self.wolf_object.parameter_id] self.wolf_object.value_id = new_state[0] self._state = new_state[1] + if ( + isinstance(self.wolf_object, ListItemParameter) + and self._state is not None + ): + self._state = self.entity_description.value_fn( + self.wolf_object, self._state + ) return self._state @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, str | None]: """Return the state attributes.""" return { "parameter_id": self.wolf_object.parameter_id, "value_id": self.wolf_object.value_id, "parent": self.wolf_object.parent, } - - -class WolfLinkHours(WolfLinkSensor): - """Class for hour based entities.""" - - _attr_icon = "mdi:clock" - _attr_native_unit_of_measurement = UnitOfTime.HOURS - - -class WolfLinkTemperature(WolfLinkSensor): - """Class for temperature based entities.""" - - _attr_device_class = SensorDeviceClass.TEMPERATURE - _attr_native_unit_of_measurement = UnitOfTemperature.CELSIUS - - -class WolfLinkPressure(WolfLinkSensor): - """Class for pressure based entities.""" - - _attr_device_class = SensorDeviceClass.PRESSURE - _attr_native_unit_of_measurement = UnitOfPressure.BAR - - -class WolfLinkPercentage(WolfLinkSensor): - """Class for percentage based entities.""" - - @property - def native_unit_of_measurement(self): - """Return the unit the value is expressed in.""" - return self.wolf_object.unit - - -class WolfLinkState(WolfLinkSensor): - """Class for entities which has defined list of state.""" - - _attr_translation_key = "state" - - @property - def native_value(self): - """Return the state converting with supported values.""" - state = super().native_value - if state is not None: - resolved_state = [ - item for item in self.wolf_object.items if item.value == int(state) - ] - if resolved_state: - resolved_name = resolved_state[0].name - return STATES.get(resolved_name, resolved_name) - return state diff --git a/homeassistant/components/workday/manifest.json b/homeassistant/components/workday/manifest.json index 
index cc6b0f30002..b08a5ed9fff 100644
--- a/homeassistant/components/workday/manifest.json
+++ b/homeassistant/components/workday/manifest.json
@@ -7,5 +7,5 @@
   "iot_class": "local_polling",
   "loggers": ["holidays"],
   "quality_scale": "internal",
-  "requirements": ["holidays==0.68"]
+  "requirements": ["holidays==0.69"]
 }
diff --git a/homeassistant/components/wyoming/__init__.py b/homeassistant/components/wyoming/__init__.py
index d639933ece6..4e76287d8e7 100644
--- a/homeassistant/components/wyoming/__init__.py
+++ b/homeassistant/components/wyoming/__init__.py
@@ -8,15 +8,19 @@ from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import Platform
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryNotReady
-from homeassistant.helpers import device_registry as dr
+from homeassistant.helpers import config_validation as cv, device_registry as dr
+from homeassistant.helpers.typing import ConfigType

 from .const import ATTR_SPEAKER, DOMAIN
 from .data import WyomingService
 from .devices import SatelliteDevice
 from .models import DomainDataItem
+from .websocket_api import async_register_websocket_api

 _LOGGER = logging.getLogger(__name__)

+CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
+
 SATELLITE_PLATFORMS = [
     Platform.ASSIST_SATELLITE,
     Platform.BINARY_SENSOR,
@@ -28,11 +32,19 @@ SATELLITE_PLATFORMS = [
 __all__ = [
     "ATTR_SPEAKER",
     "DOMAIN",
+    "async_setup",
     "async_setup_entry",
     "async_unload_entry",
 ]


+async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
+    """Set up the Wyoming integration."""
+    async_register_websocket_api(hass)
+
+    return True
+
+
 async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Load Wyoming."""
     service = await WyomingService.create(entry.data["host"], entry.data["port"])
diff --git a/homeassistant/components/wyoming/websocket_api.py b/homeassistant/components/wyoming/websocket_api.py
new file mode 100644
index 00000000000..613238c302a
--- /dev/null
+++ b/homeassistant/components/wyoming/websocket_api.py
@@ -0,0 +1,42 @@
+"""Wyoming Websocket API."""
+
+import logging
+from typing import Any
+
+import voluptuous as vol
+
+from homeassistant.components import websocket_api
+from homeassistant.core import HomeAssistant, callback
+
+from .const import DOMAIN
+from .models import DomainDataItem
+
+_LOGGER = logging.getLogger(__name__)
+
+
+@callback
+def async_register_websocket_api(hass: HomeAssistant) -> None:
+    """Register the websocket API."""
+    websocket_api.async_register_command(hass, websocket_info)
+
+
+@callback
+@websocket_api.require_admin
+@websocket_api.websocket_command({vol.Required("type"): "wyoming/info"})
+def websocket_info(
+    hass: HomeAssistant,
+    connection: websocket_api.connection.ActiveConnection,
+    msg: dict[str, Any],
+) -> None:
+    """List service information for Wyoming all config entries."""
+    entry_items: dict[str, DomainDataItem] = hass.data.get(DOMAIN, {})
+
+    connection.send_result(
+        msg["id"],
+        {
+            "info": {
+                entry_id: item.service.info.to_dict()
+                for entry_id, item in entry_items.items()
+            }
+        },
+    )
diff --git a/homeassistant/components/xiaomi_miio/button.py b/homeassistant/components/xiaomi_miio/button.py
index a5d1b4b69c6..a7bcb3a12fe 100644
--- a/homeassistant/components/xiaomi_miio/button.py
+++ b/homeassistant/components/xiaomi_miio/button.py
@@ -117,7 +117,7 @@ MODEL_TO_BUTTON_MAP: dict[str, tuple[str, ...]] = {
         ATTR_RESET_DUST_FILTER,
         ATTR_RESET_UPPER_FILTER,
     ),
-    **{model: BUTTONS_FOR_VACUUM for model in MODELS_VACUUM},
+
**dict.fromkeys(MODELS_VACUUM, BUTTONS_FOR_VACUUM), } diff --git a/homeassistant/components/yolink/manifest.json b/homeassistant/components/yolink/manifest.json index 52ae8281f59..8c297c68670 100644 --- a/homeassistant/components/yolink/manifest.json +++ b/homeassistant/components/yolink/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["auth", "application_credentials"], "documentation": "https://www.home-assistant.io/integrations/yolink", "iot_class": "cloud_push", - "requirements": ["yolink-api==0.4.8"] + "requirements": ["yolink-api==0.4.9"] } diff --git a/homeassistant/components/zamg/sensor.py b/homeassistant/components/zamg/sensor.py index 5846092e555..fdb9d51185c 100644 --- a/homeassistant/components/zamg/sensor.py +++ b/homeassistant/components/zamg/sensor.py @@ -82,7 +82,8 @@ SENSOR_TYPES: tuple[ZamgSensorEntityDescription, ...] = ( key="wind_bearing", name="Wind Bearing", native_unit_of_measurement=DEGREE, - state_class=SensorStateClass.MEASUREMENT, + state_class=SensorStateClass.MEASUREMENT_ANGLE, + device_class=SensorDeviceClass.WIND_DIRECTION, para_name="DD", ), ZamgSensorEntityDescription( diff --git a/homeassistant/components/zengge/light.py b/homeassistant/components/zengge/light.py index 2ab46820b56..ccb6733c650 100644 --- a/homeassistant/components/zengge/light.py +++ b/homeassistant/components/zengge/light.py @@ -2,138 +2,38 @@ from __future__ import annotations -import logging -from typing import Any - import voluptuous as vol -from zengge import zengge -from homeassistant.components.light import ( - ATTR_BRIGHTNESS, - ATTR_HS_COLOR, - ATTR_WHITE, - PLATFORM_SCHEMA as LIGHT_PLATFORM_SCHEMA, - ColorMode, - LightEntity, -) +from homeassistant.components.light import PLATFORM_SCHEMA as LIGHT_PLATFORM_SCHEMA from homeassistant.const import CONF_DEVICES, CONF_NAME from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_validation as cv +from homeassistant.helpers import config_validation as cv, issue_registry as ir from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from homeassistant.util import color as color_util - -_LOGGER = logging.getLogger(__name__) DEVICE_SCHEMA = vol.Schema({vol.Optional(CONF_NAME): cv.string}) +DOMAIN = "zengge" PLATFORM_SCHEMA = LIGHT_PLATFORM_SCHEMA.extend( {vol.Optional(CONF_DEVICES, default={}): {cv.string: DEVICE_SCHEMA}} ) -def setup_platform( +def async_setup_platform( hass: HomeAssistant, config: ConfigType, add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Zengge platform.""" - lights = [] - for address, device_config in config[CONF_DEVICES].items(): - light = ZenggeLight(device_config[CONF_NAME], address) - if light.is_valid: - lights.append(light) - - add_entities(lights, True) - - -class ZenggeLight(LightEntity): - """Representation of a Zengge light.""" - - _attr_supported_color_modes = {ColorMode.HS, ColorMode.WHITE} - - def __init__(self, name: str, address: str) -> None: - """Initialize the light.""" - - self._attr_name = name - self._attr_unique_id = address - self.is_valid = True - self._bulb = zengge(address) - self._white = 0 - self._attr_brightness = 0 - self._attr_hs_color = (0, 0) - self._attr_is_on = False - if self._bulb.connect() is False: - self.is_valid = False - _LOGGER.error("Failed to connect to bulb %s, %s", address, name) - return - - @property - def white_value(self) -> int: - """Return the white property.""" - return self._white - - 
@property - def color_mode(self) -> ColorMode: - """Return the current color mode.""" - if self._white != 0: - return ColorMode.WHITE - return ColorMode.HS - - def _set_rgb(self, red: int, green: int, blue: int) -> None: - """Set the rgb state.""" - self._bulb.set_rgb(red, green, blue) - - def _set_white(self, white): - """Set the white state.""" - return self._bulb.set_white(white) - - def turn_on(self, **kwargs: Any) -> None: - """Turn the specified light on.""" - self._attr_is_on = True - self._bulb.on() - - hs_color = kwargs.get(ATTR_HS_COLOR) - white = kwargs.get(ATTR_WHITE) - brightness = kwargs.get(ATTR_BRIGHTNESS) - - if white is not None: - # Change the bulb to white - self._attr_brightness = white - self._white = white - self._attr_hs_color = (0, 0) - - if hs_color is not None: - # Change the bulb to hs - self._white = 0 - self._attr_hs_color = hs_color - - if brightness is not None: - self._attr_brightness = brightness - - if self._white != 0: - self._set_white(self.brightness) - else: - assert self.hs_color is not None - assert self.brightness is not None - rgb = color_util.color_hsv_to_RGB( - self.hs_color[0], self.hs_color[1], self.brightness / 255 * 100 - ) - self._set_rgb(*rgb) - - def turn_off(self, **kwargs: Any) -> None: - """Turn the specified light off.""" - self._attr_is_on = False - self._bulb.off() - - def update(self) -> None: - """Synchronise internal state with the actual light state.""" - rgb = self._bulb.get_colour() - hsv = color_util.color_RGB_to_hsv(*rgb) - self._attr_hs_color = hsv[:2] - self._attr_brightness = int((hsv[2] / 100) * 255) - self._white = self._bulb.get_white() - if self._white: - self._attr_brightness = self._white - self._attr_is_on = self._bulb.get_on() + ir.async_create_issue( + hass, + DOMAIN, + DOMAIN, + is_fixable=False, + severity=ir.IssueSeverity.ERROR, + translation_key="integration_removed", + translation_placeholders={ + "led_ble_url": "https://www.home-assistant.io/integrations/led_ble/", + }, + ) diff --git a/homeassistant/components/zengge/manifest.json b/homeassistant/components/zengge/manifest.json index 03d989c5f3b..daa63b4de3d 100644 --- a/homeassistant/components/zengge/manifest.json +++ b/homeassistant/components/zengge/manifest.json @@ -5,6 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/zengge", "iot_class": "local_polling", "loggers": ["zengge"], - "quality_scale": "legacy", - "requirements": ["bluepy==1.3.0", "zengge==0.2"] + "quality_scale": "legacy" } diff --git a/homeassistant/components/zengge/strings.json b/homeassistant/components/zengge/strings.json new file mode 100644 index 00000000000..abc3b2450aa --- /dev/null +++ b/homeassistant/components/zengge/strings.json @@ -0,0 +1,8 @@ +{ + "issues": { + "integration_removed": { + "title": "The Zengge integration has been removed", + "description": "The Zengge integration has been removed from Home Assistant. Support for Zengge lights is provided by the `led_ble` integration.\n\nTo resolve this issue, please remove the (now defunct) `zengge` light configuration from your Home Assistant configuration and [configure the `led_ble` integration]({led_ble_url})." 
+ } + } +} diff --git a/homeassistant/components/zha/manifest.json b/homeassistant/components/zha/manifest.json index 0cc2524469e..4daa2f2aa40 100644 --- a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -21,7 +21,7 @@ "zha", "universal_silabs_flasher" ], - "requirements": ["zha==0.0.51"], + "requirements": ["zha==0.0.54"], "usb": [ { "vid": "10C4", diff --git a/homeassistant/components/zha/strings.json b/homeassistant/components/zha/strings.json index be1642227bd..a35dd50df54 100644 --- a/homeassistant/components/zha/strings.json +++ b/homeassistant/components/zha/strings.json @@ -176,7 +176,7 @@ }, "config_panel": { "zha_options": { - "title": "Global Options", + "title": "Global options", "enhanced_light_transition": "Enable enhanced light color/temperature transition from an off-state", "light_transitioning_flag": "Enable enhanced brightness slider during light transition", "group_members_assume_state": "Group members assume state of group", @@ -187,7 +187,7 @@ "consider_unavailable_battery": "Consider battery powered devices unavailable after (seconds)" }, "zha_alarm_options": { - "title": "Alarm Control Panel Options", + "title": "Alarm control panel options", "alarm_master_code": "Master code for the alarm control panel(s)", "alarm_failed_tries": "The number of consecutive failed code entries to trigger an alarm", "alarm_arm_requires_code": "Code required for arming actions" @@ -610,6 +610,12 @@ }, "flow_switch": { "name": "Flow switch" + }, + "water_leak": { + "name": "Water leak" + }, + "water_supply": { + "name": "Water supply" } }, "button": { @@ -1101,6 +1107,27 @@ }, "shutdown_timer": { "name": "Shutdown timer" + }, + "calibration_vertical_run_time_up": { + "name": "Calibration vertical run time up" + }, + "calibration_vertical_run_time_down": { + "name": "Calibration vertical run time down" + }, + "calibration_rotation_run_time_up": { + "name": "Calibration rotation run time up" + }, + "calibration_rotation_run_time_down": { + "name": "Calibration rotation run time down" + }, + "impulse_mode_duration": { + "name": "Impulse mode duration" + }, + "water_duration": { + "name": "Water duration" + }, + "water_interval": { + "name": "Water interval" } }, "select": { @@ -1144,10 +1171,10 @@ "name": "Switch type" }, "led_scaling_mode": { - "name": "Led scaling mode" + "name": "LED scaling mode" }, "smart_fan_led_display_levels": { - "name": "Smart fan led display levels" + "name": "Smart fan LED display levels" }, "increased_non_neutral_output": { "name": "Non neutral output" @@ -1319,6 +1346,9 @@ }, "hysteresis_mode": { "name": "Hysteresis mode" + }, + "speed": { + "name": "Speed" } }, "sensor": { @@ -1666,6 +1696,9 @@ }, "last_watering_duration": { "name": "Last watering duration" + }, + "device_status": { + "name": "Device status" } }, "switch": { diff --git a/homeassistant/components/zwave_js/__init__.py b/homeassistant/components/zwave_js/__init__.py index c8503b1f4c6..a7b8f9ed665 100644 --- a/homeassistant/components/zwave_js/__init__.py +++ b/homeassistant/components/zwave_js/__init__.py @@ -4,7 +4,7 @@ from __future__ import annotations import asyncio from collections import defaultdict -from contextlib import suppress +import contextlib import logging from typing import Any @@ -12,7 +12,11 @@ from awesomeversion import AwesomeVersion import voluptuous as vol from zwave_js_server.client import Client as ZwaveClient from zwave_js_server.const import CommandClass, RemoveNodeReason -from zwave_js_server.exceptions import 
BaseZwaveJSServerError, InvalidServerVersion +from zwave_js_server.exceptions import ( + BaseZwaveJSServerError, + InvalidServerVersion, + NotConnected, +) from zwave_js_server.model.driver import Driver from zwave_js_server.model.node import Node as ZwaveNode from zwave_js_server.model.notification import ( @@ -25,7 +29,7 @@ from zwave_js_server.model.value import Value, ValueNotification from homeassistant.components.hassio import AddonError, AddonManager, AddonState from homeassistant.components.persistent_notification import async_create -from homeassistant.config_entries import ConfigEntry +from homeassistant.config_entries import ConfigEntry, ConfigEntryState from homeassistant.const import ( ATTR_DEVICE_ID, ATTR_DOMAIN, @@ -36,7 +40,7 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import Event, HomeAssistant, callback -from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError from homeassistant.helpers import ( config_validation as cv, device_registry as dr, @@ -130,9 +134,8 @@ from .migrate import async_migrate_discovered_value from .services import ZWaveServices CONNECT_TIMEOUT = 10 -DATA_CLIENT_LISTEN_TASK = "client_listen_task" DATA_DRIVER_EVENTS = "driver_events" -DATA_START_CLIENT_TASK = "start_client_task" +DRIVER_READY_TIMEOUT = 60 CONFIG_SCHEMA = vol.Schema( { @@ -145,6 +148,24 @@ CONFIG_SCHEMA = vol.Schema( extra=vol.ALLOW_EXTRA, ) +PLATFORMS = [ + Platform.BINARY_SENSOR, + Platform.BUTTON, + Platform.CLIMATE, + Platform.COVER, + Platform.EVENT, + Platform.FAN, + Platform.HUMIDIFIER, + Platform.LIGHT, + Platform.LOCK, + Platform.NUMBER, + Platform.SELECT, + Platform.SENSOR, + Platform.SIREN, + Platform.SWITCH, + Platform.UPDATE, +] + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Z-Wave JS component.""" @@ -196,53 +217,99 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: raise ConfigEntryNotReady(f"Failed to connect: {err}") from err async_delete_issue(hass, DOMAIN, "invalid_server_version") - LOGGER.info("Connected to Zwave JS Server") + LOGGER.debug("Connected to Zwave JS Server") # Set up websocket API async_register_api(hass) - entry.runtime_data = {} - # Create a task to allow the config entry to be unloaded before the driver is ready. - # Unloading the config entry is needed if the client listen task errors. 
- start_client_task = hass.async_create_task(start_client(hass, entry, client)) - entry.runtime_data[DATA_START_CLIENT_TASK] = start_client_task + driver_ready = asyncio.Event() + listen_task = entry.async_create_background_task( + hass, + client_listen(hass, entry, client, driver_ready), + f"{DOMAIN}_{entry.title}_client_listen", + ) - return True - - -async def start_client( - hass: HomeAssistant, entry: ConfigEntry, client: ZwaveClient -) -> None: - """Start listening with the client.""" - entry.runtime_data[DATA_CLIENT] = client - driver_events = entry.runtime_data[DATA_DRIVER_EVENTS] = DriverEvents(hass, entry) + entry.async_on_unload(client.disconnect) async def handle_ha_shutdown(event: Event) -> None: """Handle HA shutdown.""" - await disconnect_client(hass, entry) + await client.disconnect() - listen_task = asyncio.create_task( - client_listen(hass, entry, client, driver_events.ready) - ) - entry.runtime_data[DATA_CLIENT_LISTEN_TASK] = listen_task entry.async_on_unload( hass.bus.async_listen(EVENT_HOMEASSISTANT_STOP, handle_ha_shutdown) ) - try: - await driver_events.ready.wait() - except asyncio.CancelledError: - LOGGER.debug("Cancelling start client") - return - - LOGGER.info("Connection to Zwave JS Server initialized") - - assert client.driver - async_dispatcher_send( - hass, f"{DOMAIN}_{client.driver.controller.home_id}_connected_to_server" + driver_ready_task = entry.async_create_task( + hass, + driver_ready.wait(), + f"{DOMAIN}_{entry.title}_driver_ready", + ) + done, pending = await asyncio.wait( + (driver_ready_task, listen_task), + return_when=asyncio.FIRST_COMPLETED, + timeout=DRIVER_READY_TIMEOUT, ) - await driver_events.setup(client.driver) + if driver_ready_task in pending or listen_task in done: + error_message = "Driver ready timed out" + listen_error: BaseException | None = None + if listen_task.done(): + listen_error, error_message = _get_listen_task_error(listen_task) + else: + listen_task.cancel() + driver_ready_task.cancel() + raise ConfigEntryNotReady(error_message) from listen_error + + LOGGER.debug("Connection to Zwave JS Server initialized") + + entry_runtime_data = entry.runtime_data = { + DATA_CLIENT: client, + } + entry_runtime_data[DATA_DRIVER_EVENTS] = driver_events = DriverEvents(hass, entry) + + driver = client.driver + # When the driver is ready we know it's set on the client. + assert driver is not None + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + with contextlib.suppress(NotConnected): + # If the client isn't connected the listen task may have an exception + # and we'll handle the clean up below. + await driver_events.setup(driver) + + # If the listen task is already failed, we need to raise ConfigEntryNotReady + if listen_task.done(): + listen_error, error_message = _get_listen_task_error(listen_task) + await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + raise ConfigEntryNotReady(error_message) from listen_error + + # Re-attach trigger listeners. + # Schedule this call to make sure the config entry is loaded first. 
+ + @callback + def on_config_entry_loaded() -> None: + """Signal that server connection and driver are ready.""" + if entry.state is ConfigEntryState.LOADED: + async_dispatcher_send( + hass, + f"{DOMAIN}_{driver.controller.home_id}_connected_to_server", + ) + + entry.async_on_unload(entry.async_on_state_change(on_config_entry_loaded)) + + return True + + +def _get_listen_task_error( + listen_task: asyncio.Task, +) -> tuple[BaseException | None, str]: + """Check the listen task for errors.""" + if listen_error := listen_task.exception(): + error_message = f"Client listen failed: {listen_error}" + else: + error_message = "Client connection was closed" + return listen_error, error_message class DriverEvents: @@ -255,8 +322,6 @@ class DriverEvents: self.config_entry = entry self.dev_reg = dr.async_get(hass) self.hass = hass - self.platform_setup_tasks: dict[str, asyncio.Task] = {} - self.ready = asyncio.Event() # Make sure to not pass self to ControllerEvents until all attributes are set. self.controller_events = ControllerEvents(hass, self) @@ -339,16 +404,6 @@ class DriverEvents: controller.on("identify", self.controller_events.async_on_identify) ) - async def async_setup_platform(self, platform: Platform) -> None: - """Set up platform if needed.""" - if platform not in self.platform_setup_tasks: - self.platform_setup_tasks[platform] = self.hass.async_create_task( - self.hass.config_entries.async_forward_entry_setups( - self.config_entry, [platform] - ) - ) - await self.platform_setup_tasks[platform] - class ControllerEvents: """Represent controller events. @@ -380,9 +435,6 @@ class ControllerEvents: async def async_on_node_added(self, node: ZwaveNode) -> None: """Handle node added event.""" - # Every node including the controller will have at least one sensor - await self.driver_events.async_setup_platform(Platform.SENSOR) - # Remove stale entities that may exist from a previous interview when an # interview is started. base_unique_id = get_valueless_base_unique_id(self.driver_events.driver, node) @@ -411,7 +463,6 @@ class ControllerEvents: ) # Create a ping button for each device - await self.driver_events.async_setup_platform(Platform.BUTTON) async_dispatcher_send( self.hass, f"{DOMAIN}_{self.config_entry.entry_id}_add_ping_button_entity", @@ -668,9 +719,6 @@ class NodeEvents: cc.id == CommandClass.FIRMWARE_UPDATE_MD.value for cc in node.command_classes ): - await self.controller_events.driver_events.async_setup_platform( - Platform.UPDATE - ) async_dispatcher_send( self.hass, f"{DOMAIN}_{self.config_entry.entry_id}_add_firmware_update_entity", @@ -701,21 +749,19 @@ class NodeEvents: value_updates_disc_info: dict[str, ZwaveDiscoveryInfo], ) -> None: """Handle discovery info and all dependent tasks.""" + platform = disc_info.platform # This migration logic was added in 2021.3 to handle a breaking change to # the value_id format. Some time in the future, this call (as well as the # helper functions) can be removed. 
async_migrate_discovered_value( self.hass, self.ent_reg, - self.controller_events.registered_unique_ids[device.id][disc_info.platform], + self.controller_events.registered_unique_ids[device.id][platform], device, self.controller_events.driver_events.driver, disc_info, ) - platform = disc_info.platform - await self.controller_events.driver_events.async_setup_platform(platform) - LOGGER.debug("Discovered entity: %s", disc_info) async_dispatcher_send( self.hass, @@ -930,63 +976,37 @@ async def client_listen( driver_ready: asyncio.Event, ) -> None: """Listen with the client.""" - should_reload = True try: await client.listen(driver_ready) - except asyncio.CancelledError: - should_reload = False except BaseZwaveJSServerError as err: - LOGGER.error("Failed to listen: %s", err) - except Exception as err: # noqa: BLE001 + if entry.state is not ConfigEntryState.LOADED: + raise + LOGGER.error("Client listen failed: %s", err) + except Exception as err: # We need to guard against unknown exceptions to not crash this task. LOGGER.exception("Unexpected exception: %s", err) + if entry.state is not ConfigEntryState.LOADED: + raise # The entry needs to be reloaded since a new driver state # will be acquired on reconnect. # All model instances will be replaced when the new state is acquired. - if should_reload: - LOGGER.info("Disconnected from server. Reloading integration") - hass.async_create_task(hass.config_entries.async_reload(entry.entry_id)) - - -async def disconnect_client(hass: HomeAssistant, entry: ConfigEntry) -> None: - """Disconnect client.""" - client: ZwaveClient = entry.runtime_data[DATA_CLIENT] - listen_task: asyncio.Task = entry.runtime_data[DATA_CLIENT_LISTEN_TASK] - start_client_task: asyncio.Task = entry.runtime_data[DATA_START_CLIENT_TASK] - driver_events: DriverEvents = entry.runtime_data[DATA_DRIVER_EVENTS] - listen_task.cancel() - start_client_task.cancel() - platform_setup_tasks = driver_events.platform_setup_tasks.values() - for task in platform_setup_tasks: - task.cancel() - - tasks = (listen_task, start_client_task, *platform_setup_tasks) - await asyncio.gather(*tasks, return_exceptions=True) - for task in tasks: - with suppress(asyncio.CancelledError): - await task - - if client.connected: - await client.disconnect() - LOGGER.info("Disconnected from Zwave JS Server") + if not hass.is_stopping: + if entry.state is not ConfigEntryState.LOADED: + raise HomeAssistantError("Listen task ended unexpectedly") + LOGGER.debug("Disconnected from server. 
Reloading integration") + hass.config_entries.async_schedule_reload(entry.entry_id) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - client: ZwaveClient = entry.runtime_data[DATA_CLIENT] - driver_events: DriverEvents = entry.runtime_data[DATA_DRIVER_EVENTS] - platforms = [ - platform - for platform, task in driver_events.platform_setup_tasks.items() - if not task.cancel() - ] - unload_ok = await hass.config_entries.async_unload_platforms(entry, platforms) + unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if client.connected and client.driver: - await async_disable_server_logging_if_needed(hass, entry, client.driver) - if DATA_CLIENT_LISTEN_TASK in entry.runtime_data: - await disconnect_client(hass, entry) + entry_runtime_data = entry.runtime_data + client: ZwaveClient = entry_runtime_data[DATA_CLIENT] + + if client.connected and (driver := client.driver): + await async_disable_server_logging_if_needed(hass, entry, driver) if entry.data.get(CONF_USE_ADDON) and entry.disabled_by: addon_manager: AddonManager = get_addon_manager(hass) diff --git a/homeassistant/components/zwave_js/api.py b/homeassistant/components/zwave_js/api.py index aef23cb73ea..dd698d9ed66 100644 --- a/homeassistant/components/zwave_js/api.py +++ b/homeassistant/components/zwave_js/api.py @@ -405,6 +405,7 @@ def async_register_api(hass: HomeAssistant) -> None: websocket_api.async_register_command( hass, websocket_try_parse_dsk_from_qr_code_string ) + websocket_api.async_register_command(hass, websocket_lookup_device) websocket_api.async_register_command(hass, websocket_supports_feature) websocket_api.async_register_command(hass, websocket_stop_inclusion) websocket_api.async_register_command(hass, websocket_stop_exclusion) @@ -454,6 +455,8 @@ def async_register_api(hass: HomeAssistant) -> None: websocket_api.async_register_command(hass, websocket_node_capabilities) websocket_api.async_register_command(hass, websocket_invoke_cc_api) websocket_api.async_register_command(hass, websocket_get_integration_settings) + websocket_api.async_register_command(hass, websocket_backup_nvm) + websocket_api.async_register_command(hass, websocket_restore_nvm) hass.http.register_view(FirmwareUploadView(dr.async_get(hass))) @@ -518,6 +521,7 @@ async def websocket_network_status( "supported_function_types": controller.supported_function_types, "suc_node_id": controller.suc_node_id, "supports_timers": controller.supports_timers, + "supports_long_range": controller.supports_long_range, "is_rebuilding_routes": controller.is_rebuilding_routes, "inclusion_state": controller.inclusion_state, "rf_region": controller.rf_region, @@ -972,13 +976,7 @@ async def websocket_validate_dsk_and_enter_pin( { vol.Required(TYPE): "zwave_js/provision_smart_start_node", vol.Required(ENTRY_ID): str, - vol.Exclusive( - PLANNED_PROVISIONING_ENTRY, "options" - ): PLANNED_PROVISIONING_ENTRY_SCHEMA, - vol.Exclusive( - QR_PROVISIONING_INFORMATION, "options" - ): QR_PROVISIONING_INFORMATION_SCHEMA, - vol.Exclusive(QR_CODE_STRING, "options"): QR_CODE_STRING_SCHEMA, + vol.Required(QR_PROVISIONING_INFORMATION): QR_PROVISIONING_INFORMATION_SCHEMA, } ) @websocket_api.async_response @@ -993,28 +991,10 @@ async def websocket_provision_smart_start_node( driver: Driver, ) -> None: """Pre-provision a smart start node.""" - try: - cv.has_at_least_one_key( - PLANNED_PROVISIONING_ENTRY, QR_PROVISIONING_INFORMATION, QR_CODE_STRING - )(msg) - except vol.Invalid as err: - connection.send_error( - 
msg[ID], - ERR_INVALID_FORMAT, - err.args[0], - ) - return - provisioning_info = ( - msg.get(PLANNED_PROVISIONING_ENTRY) - or msg.get(QR_PROVISIONING_INFORMATION) - or msg[QR_CODE_STRING] - ) + provisioning_info = msg[QR_PROVISIONING_INFORMATION] - if ( - QR_PROVISIONING_INFORMATION in msg - and provisioning_info.version == QRCodeVersion.S2 - ): + if provisioning_info.version == QRCodeVersion.S2: connection.send_error( msg[ID], ERR_INVALID_FORMAT, @@ -1135,6 +1115,41 @@ async def websocket_try_parse_dsk_from_qr_code_string( ) +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required(TYPE): "zwave_js/lookup_device", + vol.Required(ENTRY_ID): str, + vol.Required(MANUFACTURER_ID): int, + vol.Required(PRODUCT_TYPE): int, + vol.Required(PRODUCT_ID): int, + vol.Optional(APPLICATION_VERSION): str, + } +) +@websocket_api.async_response +@async_handle_failed_command +@async_get_entry +async def websocket_lookup_device( + hass: HomeAssistant, + connection: ActiveConnection, + msg: dict[str, Any], + entry: ConfigEntry, + client: Client, + driver: Driver, +) -> None: + """Look up the definition of a given device in the configuration DB.""" + device = await driver.config_manager.lookup_device( + msg[MANUFACTURER_ID], + msg[PRODUCT_TYPE], + msg[PRODUCT_ID], + msg.get(APPLICATION_VERSION), + ) + if device is None: + connection.send_error(msg[ID], ERR_NOT_FOUND, "Device not found") + else: + connection.send_result(msg[ID], device.to_dict()) + + @websocket_api.require_admin @websocket_api.websocket_command( { @@ -2779,3 +2794,126 @@ def websocket_get_integration_settings( CONF_INSTALLER_MODE: hass.data[DOMAIN].get(CONF_INSTALLER_MODE, False), }, ) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required(TYPE): "zwave_js/backup_nvm", + vol.Required(ENTRY_ID): str, + } +) +@websocket_api.async_response +@async_handle_failed_command +@async_get_entry +async def websocket_backup_nvm( + hass: HomeAssistant, + connection: ActiveConnection, + msg: dict[str, Any], + entry: ConfigEntry, + client: Client, + driver: Driver, +) -> None: + """Backup NVM data.""" + controller = driver.controller + + @callback + def async_cleanup() -> None: + """Remove signal listeners.""" + for unsub in unsubs: + unsub() + + @callback + def forward_progress(event: dict) -> None: + """Forward progress events to websocket.""" + connection.send_message( + websocket_api.event_message( + msg[ID], + { + "event": event["event"], + "bytesRead": event["bytesRead"], + "total": event["total"], + }, + ) + ) + + # Set up subscription for progress events + connection.subscriptions[msg["id"]] = async_cleanup + msg[DATA_UNSUBSCRIBE] = unsubs = [ + controller.on("nvm backup progress", forward_progress), + ] + + result = await controller.async_backup_nvm_raw_base64() + # Send the finished event with the backup data + connection.send_message( + websocket_api.event_message( + msg[ID], + { + "event": "finished", + "data": result, + }, + ) + ) + connection.send_result(msg[ID]) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required(TYPE): "zwave_js/restore_nvm", + vol.Required(ENTRY_ID): str, + vol.Required("data"): str, + } +) +@websocket_api.async_response +@async_handle_failed_command +@async_get_entry +async def websocket_restore_nvm( + hass: HomeAssistant, + connection: ActiveConnection, + msg: dict[str, Any], + entry: ConfigEntry, + client: Client, + driver: Driver, +) -> None: + """Restore NVM data.""" + controller = driver.controller + + @callback + def 
async_cleanup() -> None: + """Remove signal listeners.""" + for unsub in unsubs: + unsub() + + @callback + def forward_progress(event: dict) -> None: + """Forward progress events to websocket.""" + connection.send_message( + websocket_api.event_message( + msg[ID], + { + "event": event["event"], + "bytesRead": event.get("bytesRead"), + "bytesWritten": event.get("bytesWritten"), + "total": event["total"], + }, + ) + ) + + # Set up subscription for progress events + connection.subscriptions[msg["id"]] = async_cleanup + msg[DATA_UNSUBSCRIBE] = unsubs = [ + controller.on("nvm convert progress", forward_progress), + controller.on("nvm restore progress", forward_progress), + ] + + await controller.async_restore_nvm_base64(msg["data"]) + connection.send_message( + websocket_api.event_message( + msg[ID], + { + "event": "finished", + }, + ) + ) + connection.send_result(msg[ID]) diff --git a/homeassistant/components/zwave_js/config_flow.py b/homeassistant/components/zwave_js/config_flow.py index 44adf6a12ab..aed0dd839be 100644 --- a/homeassistant/components/zwave_js/config_flow.py +++ b/homeassistant/components/zwave_js/config_flow.py @@ -42,7 +42,6 @@ from homeassistant.helpers.service_info.usb import UsbServiceInfo from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo from homeassistant.helpers.typing import VolDictType -from . import disconnect_client from .addon import get_addon_manager from .const import ( ADDON_SLUG, @@ -861,7 +860,7 @@ class OptionsFlowHandler(BaseZwaveJSFlow, OptionsFlow): and self.config_entry.state == ConfigEntryState.LOADED ): # Disconnect integration before restarting add-on. - await disconnect_client(self.hass, self.config_entry) + await self.hass.config_entries.async_unload(self.config_entry.entry_id) return await self.async_step_start_addon() diff --git a/homeassistant/components/zwave_js/helpers.py b/homeassistant/components/zwave_js/helpers.py index 904a26acc78..8a90ebf6f88 100644 --- a/homeassistant/components/zwave_js/helpers.py +++ b/homeassistant/components/zwave_js/helpers.py @@ -187,7 +187,7 @@ async def async_disable_server_logging_if_needed( old_server_log_level, ) await driver.async_update_log_config(LogConfig(level=old_server_log_level)) - await driver.client.disable_server_logging() + driver.client.disable_server_logging() LOGGER.info("Zwave-js-server logging is enabled") diff --git a/homeassistant/components/zwave_js/light.py b/homeassistant/components/zwave_js/light.py index a610bbcb91e..f60e129cc77 100644 --- a/homeassistant/components/zwave_js/light.py +++ b/homeassistant/components/zwave_js/light.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import Any, cast +from typing import TYPE_CHECKING, Any, cast from zwave_js_server.client import Client as ZwaveClient from zwave_js_server.const import ( @@ -483,7 +483,7 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): red = multi_color.get(COLOR_SWITCH_COMBINED_RED, red_val.value) green = multi_color.get(COLOR_SWITCH_COMBINED_GREEN, green_val.value) blue = multi_color.get(COLOR_SWITCH_COMBINED_BLUE, blue_val.value) - if None not in (red, green, blue): + if red is not None and green is not None and blue is not None: # convert to HS self._hs_color = color_util.color_RGB_to_hs(red, green, blue) # Light supports color, set color mode to hs @@ -496,7 +496,8 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): # Calculate color temps based on whites if cold_white or warm_white: self._color_temp = color_util.color_temperature_mired_to_kelvin( - MAX_MIREDS - ((cold_white / 
255) * (MAX_MIREDS - MIN_MIREDS)) + MAX_MIREDS + - ((cast(int, cold_white) / 255) * (MAX_MIREDS - MIN_MIREDS)) ) # White channels turned on, set color mode to color_temp self._color_mode = ColorMode.COLOR_TEMP @@ -505,6 +506,13 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): # only one white channel (warm white) = rgbw support elif red_val and green_val and blue_val and ww_val: white = multi_color.get(COLOR_SWITCH_COMBINED_WARM_WHITE, ww_val.value) + if TYPE_CHECKING: + assert ( + red is not None + and green is not None + and blue is not None + and white is not None + ) self._rgbw_color = (red, green, blue, white) # Light supports rgbw, set color mode to rgbw self._color_mode = ColorMode.RGBW @@ -512,6 +520,13 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): elif cw_val: self._supports_rgbw = True white = multi_color.get(COLOR_SWITCH_COMBINED_COLD_WHITE, cw_val.value) + if TYPE_CHECKING: + assert ( + red is not None + and green is not None + and blue is not None + and white is not None + ) self._rgbw_color = (red, green, blue, white) # Light supports rgbw, set color mode to rgbw self._color_mode = ColorMode.RGBW diff --git a/homeassistant/components/zwave_js/manifest.json b/homeassistant/components/zwave_js/manifest.json index 3178bdf46ad..7e8b473922f 100644 --- a/homeassistant/components/zwave_js/manifest.json +++ b/homeassistant/components/zwave_js/manifest.json @@ -9,7 +9,7 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["zwave_js_server"], - "requirements": ["pyserial==3.5", "zwave-js-server-python==0.60.1"], + "requirements": ["pyserial==3.5", "zwave-js-server-python==0.62.0"], "usb": [ { "vid": "0658", diff --git a/homeassistant/config_entries.py b/homeassistant/config_entries.py index bfea2c29eac..d3e681ecca1 100644 --- a/homeassistant/config_entries.py +++ b/homeassistant/config_entries.py @@ -195,8 +195,6 @@ DISCOVERY_SOURCES = { SOURCE_ZEROCONF, } -RECONFIGURE_NOTIFICATION_ID = "config_entry_reconfigure" - EVENT_FLOW_DISCOVERED = "config_entry_discovered" SIGNAL_CONFIG_ENTRY_CHANGED = SignalType["ConfigEntryChange", "ConfigEntry"]( @@ -1714,16 +1712,6 @@ class ConfigEntriesFlowManager( # Create notification. if source in DISCOVERY_SOURCES: await self._discovery_debouncer.async_call() - elif source == SOURCE_REAUTH: - persistent_notification.async_create( - self.hass, - title="Integration requires reconfiguration", - message=( - "At least one of your integrations requires reconfiguration to " - "continue functioning. [Check it out](/config/integrations)." 
- ), - notification_id=RECONFIGURE_NOTIFICATION_ID, - ) @callback def _async_discovery(self) -> None: @@ -2986,8 +2974,11 @@ class ConfigFlow(ConfigEntryBaseFlow): return None if raise_on_progress: - if self._async_in_progress( - include_uninitialized=True, match_context={"unique_id": unique_id} + if any( + flow["context"]["source"] != SOURCE_REAUTH + for flow in self._async_in_progress( + include_uninitialized=True, match_context={"unique_id": unique_id} + ) ): raise data_entry_flow.AbortFlow("already_in_progress") @@ -3116,29 +3107,6 @@ class ConfigFlow(ConfigEntryBaseFlow): """Handle a flow initialized by discovery.""" return await self._async_step_discovery_without_unique_id() - @callback - def async_abort( - self, - *, - reason: str, - description_placeholders: Mapping[str, str] | None = None, - ) -> ConfigFlowResult: - """Abort the config flow.""" - # Remove reauth notification if no reauth flows are in progress - if self.source == SOURCE_REAUTH and not any( - ent["flow_id"] != self.flow_id - for ent in self.hass.config_entries.flow.async_progress_by_handler( - self.handler, match_context={"source": SOURCE_REAUTH} - ) - ): - persistent_notification.async_dismiss( - self.hass, RECONFIGURE_NOTIFICATION_ID - ) - - return super().async_abort( - reason=reason, description_placeholders=description_placeholders - ) - async def async_step_bluetooth( self, discovery_info: BluetoothServiceInfoBleak ) -> ConfigFlowResult: @@ -3488,18 +3456,14 @@ class ConfigSubentryFlow( return self.async_abort(reason="reconfigure_successful") @property - def _reconfigure_entry_id(self) -> str: - """Return reconfigure entry id.""" - if self.source != SOURCE_RECONFIGURE: - raise ValueError(f"Source is {self.source}, expected {SOURCE_RECONFIGURE}") + def _entry_id(self) -> str: + """Return config entry id.""" return self.handler[0] @callback - def _get_reconfigure_entry(self) -> ConfigEntry: - """Return the reconfigure config entry linked to the current context.""" - return self.hass.config_entries.async_get_known_entry( - self._reconfigure_entry_id - ) + def _get_entry(self) -> ConfigEntry: + """Return the config entry linked to the current context.""" + return self.hass.config_entries.async_get_known_entry(self._entry_id) @property def _reconfigure_subentry_id(self) -> str: @@ -3511,9 +3475,7 @@ class ConfigSubentryFlow( @callback def _get_reconfigure_subentry(self) -> ConfigSubentry: """Return the reconfigure config subentry linked to the current context.""" - entry = self.hass.config_entries.async_get_known_entry( - self._reconfigure_entry_id - ) + entry = self.hass.config_entries.async_get_known_entry(self._entry_id) subentry_id = self._reconfigure_subentry_id if subentry_id not in entry.subentries: raise UnknownSubEntry(subentry_id) diff --git a/homeassistant/const.py b/homeassistant/const.py index b9695c350a7..a6f39db8532 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -24,7 +24,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2025 -MINOR_VERSION: Final = 4 +MINOR_VERSION: Final = 5 PATCH_VERSION: Final = "0.dev0" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" diff --git a/homeassistant/data_entry_flow.py b/homeassistant/data_entry_flow.py index 251e22e7990..f7be891b61b 100644 --- a/homeassistant/data_entry_flow.py +++ b/homeassistant/data_entry_flow.py @@ -657,6 +657,19 @@ class FlowHandler(Generic[_FlowContextT, _FlowResultT, _HandlerT]): ): continue + # Process the 
section schema options + if ( + suggested_values is not None + and isinstance(val, section) + and key in suggested_values + ): + new_section_key = copy.copy(key) + schema[new_section_key] = val + val.schema = self.add_suggested_values_to_schema( + val.schema, suggested_values[key] + ) + continue + new_key = key if ( suggested_values diff --git a/homeassistant/generated/bluetooth.py b/homeassistant/generated/bluetooth.py index 587fea8b941..1ff444ca25f 100644 --- a/homeassistant/generated/bluetooth.py +++ b/homeassistant/generated/bluetooth.py @@ -356,6 +356,21 @@ BLUETOOTH: Final[list[dict[str, bool | str | int | list[int]]]] = [ "domain": "inkbird", "local_name": "tps", }, + { + "connectable": False, + "domain": "inkbird", + "local_name": "ITH-11-B", + }, + { + "connectable": False, + "domain": "inkbird", + "local_name": "ITH-13-B", + }, + { + "connectable": False, + "domain": "inkbird", + "local_name": "ITH-21-B", + }, { "connectable": True, "domain": "iron_os", diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index 8284f77ef94..d192b8fcd13 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -91,6 +91,7 @@ FLOWS = { "bluetooth", "bmw_connected_drive", "bond", + "bosch_alarm", "bosch_shc", "braviatv", "bring", @@ -488,6 +489,7 @@ FLOWS = { "proximity", "prusalink", "ps4", + "pterodactyl", "pure_energie", "purpleair", "pushbullet", @@ -513,6 +515,7 @@ FLOWS = { "rdw", "recollect_waste", "refoss", + "remote_calendar", "renault", "renson", "reolink", diff --git a/homeassistant/generated/dhcp.py b/homeassistant/generated/dhcp.py index 3dba5a98f3c..8ee1ea270f3 100644 --- a/homeassistant/generated/dhcp.py +++ b/homeassistant/generated/dhcp.py @@ -498,6 +498,18 @@ DHCP: Final[list[dict[str, str | bool]]] = [ "hostname": "ring*", "macaddress": "341513*", }, + { + "domain": "roborock", + "macaddress": "249E7D*", + }, + { + "domain": "roborock", + "macaddress": "B04A39*", + }, + { + "domain": "roborock", + "hostname": "roborock-*", + }, { "domain": "roomba", "hostname": "irobot-*", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index b916526aaf3..58f7f7fab20 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -611,6 +611,13 @@ "config_flow": true, "iot_class": "local_push" }, + "backup": { + "name": "Backup", + "integration_type": "service", + "config_flow": false, + "iot_class": "calculated", + "single_config_entry": true + }, "baf": { "name": "Big Ass Fans", "integration_type": "hub", @@ -752,6 +759,12 @@ "config_flow": true, "iot_class": "local_push" }, + "bosch_alarm": { + "name": "Bosch Alarm", + "integration_type": "device", + "config_flow": true, + "iot_class": "local_push" + }, "bosch_shc": { "name": "Bosch SHC", "integration_type": "hub", @@ -2489,6 +2502,12 @@ "config_flow": true, "iot_class": "cloud_polling" }, + "hardkernel": { + "name": "Hardkernel", + "integration_type": "hardware", + "config_flow": false, + "single_config_entry": true + }, "harman_kardon_avr": { "name": "Harman Kardon AVR", "integration_type": "hub", @@ -2632,6 +2651,23 @@ "integration_type": "virtual", "supported_by": "netatmo" }, + "homeassistant_green": { + "name": "Home Assistant Green", + "integration_type": "hardware", + "config_flow": false, + "single_config_entry": true + }, + "homeassistant_sky_connect": { + "name": "Home Assistant Connect ZBT-1", + "integration_type": "hardware", + "config_flow": true + }, + 
"homeassistant_yellow": { + "name": "Home Assistant Yellow", + "integration_type": "hardware", + "config_flow": false, + "single_config_entry": true + }, "homee": { "name": "Homee", "integration_type": "hub", @@ -4000,7 +4036,10 @@ "iot_class": "assumed_state", "name": "Motionblinds Bluetooth" } - } + }, + "iot_standards": [ + "matter" + ] }, "motioneye": { "name": "motionEye", @@ -4988,6 +5027,12 @@ "integration_type": "virtual", "supported_by": "opower" }, + "pterodactyl": { + "name": "Pterodactyl", + "integration_type": "hub", + "config_flow": true, + "iot_class": "local_polling" + }, "pulseaudio_loopback": { "name": "PulseAudio Loopback", "integration_type": "hub", @@ -5192,6 +5237,11 @@ "raspberry_pi": { "name": "Raspberry Pi", "integrations": { + "raspberry_pi": { + "integration_type": "hardware", + "config_flow": false, + "name": "Raspberry Pi" + }, "rpi_camera": { "integration_type": "hub", "config_flow": false, @@ -5265,6 +5315,11 @@ "config_flow": false, "iot_class": "cloud_push" }, + "remote_calendar": { + "integration_type": "service", + "config_flow": true, + "iot_class": "cloud_polling" + }, "renault": { "name": "Renault", "integration_type": "hub", @@ -7690,6 +7745,7 @@ "plant", "proximity", "random", + "remote_calendar", "rpi_power", "schedule", "season", diff --git a/homeassistant/helpers/backup.py b/homeassistant/helpers/backup.py index 4ab302749a1..b3607f6653c 100644 --- a/homeassistant/helpers/backup.py +++ b/homeassistant/helpers/backup.py @@ -12,7 +12,11 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.util.hass_dict import HassKey if TYPE_CHECKING: - from homeassistant.components.backup import BackupManager, ManagerStateEvent + from homeassistant.components.backup import ( + BackupManager, + BackupPlatformEvent, + ManagerStateEvent, + ) DATA_BACKUP: HassKey[BackupData] = HassKey("backup_data") DATA_MANAGER: HassKey[BackupManager] = HassKey("backup") @@ -25,6 +29,9 @@ class BackupData: backup_event_subscriptions: list[Callable[[ManagerStateEvent], None]] = field( default_factory=list ) + backup_platform_event_subscriptions: list[Callable[[BackupPlatformEvent], None]] = ( + field(default_factory=list) + ) manager_ready: asyncio.Future[None] = field(default_factory=asyncio.Future) @@ -68,3 +75,20 @@ def async_subscribe_events( backup_event_subscriptions.append(on_event) return remove_subscription + + +@callback +def async_subscribe_platform_events( + hass: HomeAssistant, + on_event: Callable[[BackupPlatformEvent], None], +) -> Callable[[], None]: + """Subscribe to backup platform events.""" + backup_platform_event_subscriptions = hass.data[ + DATA_BACKUP + ].backup_platform_event_subscriptions + + def remove_subscription() -> None: + backup_platform_event_subscriptions.remove(on_event) + + backup_platform_event_subscriptions.append(on_event) + return remove_subscription diff --git a/homeassistant/helpers/check_config.py b/homeassistant/helpers/check_config.py index 0841585e1a1..836536da9ee 100644 --- a/homeassistant/helpers/check_config.py +++ b/homeassistant/helpers/check_config.py @@ -8,6 +8,7 @@ import os from pathlib import Path from typing import NamedTuple, Self +from annotatedyaml import loader as yaml_loader import voluptuous as vol from homeassistant import loader @@ -29,7 +30,6 @@ from homeassistant.requirements import ( async_clear_install_history, async_get_integration_with_requirements, ) -from homeassistant.util.yaml import loader as yaml_loader from . 
import config_validation as cv from .typing import ConfigType diff --git a/homeassistant/helpers/config_entry_oauth2_flow.py b/homeassistant/helpers/config_entry_oauth2_flow.py index 24a9de5b562..84728978ede 100644 --- a/homeassistant/helpers/config_entry_oauth2_flow.py +++ b/homeassistant/helpers/config_entry_oauth2_flow.py @@ -11,7 +11,9 @@ from __future__ import annotations from abc import ABC, ABCMeta, abstractmethod import asyncio from asyncio import Lock +import base64 from collections.abc import Awaitable, Callable +import hashlib from http import HTTPStatus from json import JSONDecodeError import logging @@ -166,6 +168,11 @@ class LocalOAuth2Implementation(AbstractOAuth2Implementation): """Extra data that needs to be appended to the authorize url.""" return {} + @property + def extra_token_resolve_data(self) -> dict: + """Extra data for the token resolve request.""" + return {} + async def async_generate_authorize_url(self, flow_id: str) -> str: """Generate a url for the user to authorize.""" redirect_uri = self.redirect_uri @@ -186,13 +193,13 @@ class LocalOAuth2Implementation(AbstractOAuth2Implementation): async def async_resolve_external_data(self, external_data: Any) -> dict: """Resolve the authorization code to tokens.""" - return await self._token_request( - { - "grant_type": "authorization_code", - "code": external_data["code"], - "redirect_uri": external_data["state"]["redirect_uri"], - } - ) + request_data: dict = { + "grant_type": "authorization_code", + "code": external_data["code"], + "redirect_uri": external_data["state"]["redirect_uri"], + } + request_data.update(self.extra_token_resolve_data) + return await self._token_request(request_data) async def _async_refresh_token(self, token: dict) -> dict: """Refresh tokens.""" @@ -211,7 +218,7 @@ class LocalOAuth2Implementation(AbstractOAuth2Implementation): data["client_id"] = self.client_id - if self.client_secret is not None: + if self.client_secret: data["client_secret"] = self.client_secret _LOGGER.debug("Sending token request to %s", self.token_url) @@ -233,6 +240,100 @@ class LocalOAuth2Implementation(AbstractOAuth2Implementation): return cast(dict, await resp.json()) +class LocalOAuth2ImplementationWithPkce(LocalOAuth2Implementation): + """Local OAuth2 implementation with PKCE.""" + + def __init__( + self, + hass: HomeAssistant, + domain: str, + client_id: str, + authorize_url: str, + token_url: str, + client_secret: str = "", + code_verifier_length: int = 128, + ) -> None: + """Initialize local auth implementation.""" + super().__init__( + hass, + domain, + client_id, + client_secret, + authorize_url, + token_url, + ) + + # Generate code verifier + self.code_verifier = LocalOAuth2ImplementationWithPkce.generate_code_verifier( + code_verifier_length + ) + + @property + def extra_authorize_data(self) -> dict: + """Extra data that needs to be appended to the authorize url. + + If you want to override this method, + calling super is mandatory (for adding scopes): + ``` + @def extra_authorize_data(self) -> dict: + data: dict = { + "scope": "openid profile email", + } + data.update(super().extra_authorize_data) + return data + ``` + """ + return { + "code_challenge": LocalOAuth2ImplementationWithPkce.compute_code_challenge( + self.code_verifier + ), + "code_challenge_method": "S256", + } + + @property + def extra_token_resolve_data(self) -> dict: + """Extra data that needs to be included in the token resolve request. 
+ + If you want to override this method, + calling super is mandatory (for adding `someKey`): + ``` + @def extra_token_resolve_data(self) -> dict: + data: dict = { + "someKey": "someValue", + } + data.update(super().extra_token_resolve_data) + return data + ``` + """ + + return {"code_verifier": self.code_verifier} + + @staticmethod + def generate_code_verifier(code_verifier_length: int = 128) -> str: + """Generate a code verifier.""" + if not 43 <= code_verifier_length <= 128: + msg = ( + "Parameter `code_verifier_length` must validate" + "`43 <= code_verifier_length <= 128`." + ) + raise ValueError(msg) + return secrets.token_urlsafe(96)[:code_verifier_length] + + @staticmethod + def compute_code_challenge(code_verifier: str) -> str: + """Compute the code challenge.""" + if not 43 <= len(code_verifier) <= 128: + msg = ( + "Parameter `code_verifier` must validate " + "`43 <= len(code_verifier) <= 128`." + ) + raise ValueError(msg) + + hashed = hashlib.sha256(code_verifier.encode("ascii")).digest() + encoded = base64.urlsafe_b64encode(hashed) + return encoded.decode("ascii").replace("=", "") + + class AbstractOAuth2FlowHandler(config_entries.ConfigFlow, metaclass=ABCMeta): """Handle a config flow.""" diff --git a/homeassistant/helpers/config_validation.py b/homeassistant/helpers/config_validation.py index 4978158c0f6..5c1a7c99565 100644 --- a/homeassistant/helpers/config_validation.py +++ b/homeassistant/helpers/config_validation.py @@ -1153,41 +1153,6 @@ def _custom_serializer(schema: Any, *, allow_section: bool) -> Any: return voluptuous_serialize.UNSUPPORTED -def expand_condition_shorthand(value: Any | None) -> Any: - """Expand boolean condition shorthand notations.""" - - if not isinstance(value, dict) or CONF_CONDITIONS in value: - return value - - for key, schema in ( - ("and", AND_CONDITION_SHORTHAND_SCHEMA), - ("or", OR_CONDITION_SHORTHAND_SCHEMA), - ("not", NOT_CONDITION_SHORTHAND_SCHEMA), - ): - try: - schema(value) - return { - CONF_CONDITION: key, - CONF_CONDITIONS: value[key], - **{k: value[k] for k in value if k != key}, - } - except vol.MultipleInvalid: - pass - - if isinstance(value.get(CONF_CONDITION), list): - try: - CONDITION_SHORTHAND_SCHEMA(value) - return { - CONF_CONDITION: "and", - CONF_CONDITIONS: value[CONF_CONDITION], - **{k: value[k] for k in value if k != CONF_CONDITION}, - } - except vol.MultipleInvalid: - pass - - return value - - # Schemas def empty_config_schema(domain: str) -> Callable[[dict], dict]: """Return a config schema which logs if there are configuration parameters.""" @@ -1683,7 +1648,43 @@ DEVICE_CONDITION_BASE_SCHEMA = vol.Schema( DEVICE_CONDITION_SCHEMA = DEVICE_CONDITION_BASE_SCHEMA.extend({}, extra=vol.ALLOW_EXTRA) -dynamic_template_condition_action = vol.All( + +def expand_condition_shorthand(value: Any | None) -> Any: + """Expand boolean condition shorthand notations.""" + + if not isinstance(value, dict) or CONF_CONDITIONS in value: + return value + + for key, schema in ( + ("and", AND_CONDITION_SHORTHAND_SCHEMA), + ("or", OR_CONDITION_SHORTHAND_SCHEMA), + ("not", NOT_CONDITION_SHORTHAND_SCHEMA), + ): + try: + schema(value) + return { + CONF_CONDITION: key, + CONF_CONDITIONS: value[key], + **{k: value[k] for k in value if k != key}, + } + except vol.MultipleInvalid: + pass + + if isinstance(value.get(CONF_CONDITION), list): + try: + CONDITION_SHORTHAND_SCHEMA(value) + return { + CONF_CONDITION: "and", + CONF_CONDITIONS: value[CONF_CONDITION], + **{k: value[k] for k in value if k != CONF_CONDITION}, + } + except vol.MultipleInvalid: + pass 
+ + return value + + +dynamic_template_condition = vol.All( # Wrap a shorthand template condition in a template condition dynamic_template, lambda config: { @@ -1724,7 +1725,7 @@ CONDITION_SCHEMA: vol.Schema = vol.Schema( }, ), ), - dynamic_template_condition_action, + dynamic_template_condition, ) ) @@ -1873,12 +1874,8 @@ _SCRIPT_REPEAT_SCHEMA = vol.Schema( vol.Exclusive(CONF_FOR_EACH, "repeat"): vol.Any( dynamic_template, vol.All(list, template_complex) ), - vol.Exclusive(CONF_WHILE, "repeat"): vol.All( - ensure_list, [CONDITION_SCHEMA] - ), - vol.Exclusive(CONF_UNTIL, "repeat"): vol.All( - ensure_list, [CONDITION_SCHEMA] - ), + vol.Exclusive(CONF_WHILE, "repeat"): CONDITIONS_SCHEMA, + vol.Exclusive(CONF_UNTIL, "repeat"): CONDITIONS_SCHEMA, vol.Required(CONF_SEQUENCE): SCRIPT_SCHEMA, }, has_at_least_one_key(CONF_COUNT, CONF_FOR_EACH, CONF_WHILE, CONF_UNTIL), @@ -1894,9 +1891,7 @@ _SCRIPT_CHOOSE_SCHEMA = vol.Schema( [ { vol.Optional(CONF_ALIAS): string, - vol.Required(CONF_CONDITIONS): vol.All( - ensure_list, [CONDITION_SCHEMA] - ), + vol.Required(CONF_CONDITIONS): CONDITIONS_SCHEMA, vol.Required(CONF_SEQUENCE): SCRIPT_SCHEMA, } ], @@ -1917,7 +1912,7 @@ _SCRIPT_WAIT_FOR_TRIGGER_SCHEMA = vol.Schema( _SCRIPT_IF_SCHEMA = vol.Schema( { **SCRIPT_ACTION_BASE_SCHEMA, - vol.Required(CONF_IF): vol.All(ensure_list, [CONDITION_SCHEMA]), + vol.Required(CONF_IF): CONDITIONS_SCHEMA, vol.Required(CONF_THEN): SCRIPT_SCHEMA, vol.Optional(CONF_ELSE): SCRIPT_SCHEMA, } diff --git a/homeassistant/helpers/deprecation.py b/homeassistant/helpers/deprecation.py index 375ec58c26f..101b9731caf 100644 --- a/homeassistant/helpers/deprecation.py +++ b/homeassistant/helpers/deprecation.py @@ -369,7 +369,7 @@ class EnumWithDeprecatedMembers(EnumType): """Enum with deprecated members.""" def __new__( - mcs, # noqa: N804 ruff bug, ruff does not understand this is a metaclass + mcs, cls: str, bases: tuple[type, ...], classdict: _EnumDict, diff --git a/homeassistant/helpers/entity.py b/homeassistant/helpers/entity.py index bed5ce586c5..bdcda58c054 100644 --- a/homeassistant/helpers/entity.py +++ b/homeassistant/helpers/entity.py @@ -281,7 +281,7 @@ class CachedProperties(type): """ def __new__( - mcs, # noqa: N804 ruff bug, ruff does not understand this is a metaclass + mcs, name: str, bases: tuple[type, ...], namespace: dict[Any, Any], diff --git a/homeassistant/helpers/llm.py b/homeassistant/helpers/llm.py index 4ad2bdd6563..7f6fe22ec70 100644 --- a/homeassistant/helpers/llm.py +++ b/homeassistant/helpers/llm.py @@ -66,6 +66,11 @@ Answer questions about the world truthfully. Answer in plain text. Keep it simple and to the point. """ +NO_ENTITIES_PROMPT = ( + "Only if the user wants to control a device, tell them to expose entities " + "to their voice assistant in Home Assistant." +) + @callback def async_render_no_api_prompt(hass: HomeAssistant) -> str: @@ -311,7 +316,7 @@ class AssistAPI(API): """Return the instance of the API.""" if llm_context.assistant: exposed_entities: dict | None = _get_exposed_entities( - self.hass, llm_context.assistant + self.hass, llm_context.assistant, include_state=False ) else: exposed_entities = None @@ -329,10 +334,7 @@ class AssistAPI(API): self, llm_context: LLMContext, exposed_entities: dict | None ) -> str: if not exposed_entities or not exposed_entities["entities"]: - return ( - "Only if the user wants to control a device, tell them to expose entities " - "to their voice assistant in Home Assistant." 
- ) + return NO_ENTITIES_PROMPT return "\n".join( [ *self._async_get_preable(llm_context), @@ -454,11 +456,16 @@ class AssistAPI(API): for script_entity_id in exposed_entities[SCRIPT_DOMAIN] ) + if exposed_domains: + tools.append(GetHomeStateTool()) + return tools def _get_exposed_entities( - hass: HomeAssistant, assistant: str + hass: HomeAssistant, + assistant: str, + include_state: bool = True, ) -> dict[str, dict[str, dict[str, Any]]]: """Get exposed entities. @@ -519,24 +526,28 @@ def _get_exposed_entities( info: dict[str, Any] = { "names": ", ".join(names), "domain": state.domain, - "state": state.state, } + if include_state: + info["state"] = state.state + if description: info["description"] = description if area_names: info["areas"] = ", ".join(area_names) - if attributes := { - attr_name: ( - str(attr_value) - if isinstance(attr_value, (Enum, Decimal, int)) - else attr_value - ) - for attr_name, attr_value in state.attributes.items() - if attr_name in interesting_attributes - }: + if include_state and ( + attributes := { + attr_name: ( + str(attr_value) + if isinstance(attr_value, (Enum, Decimal, int)) + else attr_value + ) + for attr_name, attr_value in state.attributes.items() + if attr_name in interesting_attributes + } + ): info["attributes"] = attributes if state.domain in data: @@ -885,3 +896,39 @@ class CalendarGetEventsTool(Tool): ] return {"success": True, "result": events} + + +class GetHomeStateTool(Tool): + """Tool for getting the current state of exposed entities. + + This returns state for all entities that have been exposed to + the assistant. This is different than the GetState intent, which + returns state for entities based on intent parameters. + """ + + name = "get_home_state" + description = "Get the current state of all devices in the home. " + + async def async_call( + self, + hass: HomeAssistant, + tool_input: ToolInput, + llm_context: LLMContext, + ) -> JsonObjectType: + """Get the current state of exposed entities.""" + if llm_context.assistant is None: + # Note this doesn't happen in practice since this tool won't be + # exposed if no assistant is configured. 
+ return {"success": False, "error": "No assistant configured"} + + exposed_entities = _get_exposed_entities(hass, llm_context.assistant) + if not exposed_entities["entities"]: + return {"success": False, "error": NO_ENTITIES_PROMPT} + prompt = [ + "An overview of the areas and the devices in this smart home:", + yaml_util.dump(list(exposed_entities["entities"].values())), + ] + return { + "success": True, + "result": "\n".join(prompt), + } diff --git a/homeassistant/helpers/script.py b/homeassistant/helpers/script.py index bf7a4a0971c..1242ef3e4d5 100644 --- a/homeassistant/helpers/script.py +++ b/homeassistant/helpers/script.py @@ -966,12 +966,11 @@ class _ScriptRun: ## Variable actions ## async def _async_step_variables(self) -> None: - """Define a local variable.""" - self._step_log("defining local variables") - for key, value in ( - self._action[CONF_VARIABLES].async_simple_render(self._variables).items() - ): - self._variables.define_local(key, value) + """Assign values to variables.""" + self._step_log("assigning variables") + self._variables.update( + self._action[CONF_VARIABLES].async_simple_render(self._variables) + ) ## External actions ## diff --git a/homeassistant/helpers/selector.py b/homeassistant/helpers/selector.py index dd2fd8a677c..f2c76d1d019 100644 --- a/homeassistant/helpers/selector.py +++ b/homeassistant/helpers/selector.py @@ -1136,7 +1136,7 @@ class SelectOptionDict(TypedDict): class SelectSelectorMode(StrEnum): - """Possible modes for a number selector.""" + """Possible modes for a select selector.""" LIST = "list" DROPDOWN = "dropdown" diff --git a/homeassistant/helpers/template.py b/homeassistant/helpers/template.py index 20531596fdd..70a94cfaaa9 100644 --- a/homeassistant/helpers/template.py +++ b/homeassistant/helpers/template.py @@ -2785,6 +2785,76 @@ def flatten(value: Iterable[Any], levels: int | None = None) -> list[Any]: return flattened +def intersect(value: Iterable[Any], other: Iterable[Any]) -> list[Any]: + """Return the common elements between two lists.""" + if not isinstance(value, Iterable) or isinstance(value, str): + raise TypeError(f"intersect expected a list, got {type(value).__name__}") + if not isinstance(other, Iterable) or isinstance(other, str): + raise TypeError(f"intersect expected a list, got {type(other).__name__}") + + return list(set(value) & set(other)) + + +def difference(value: Iterable[Any], other: Iterable[Any]) -> list[Any]: + """Return elements in first list that are not in second list.""" + if not isinstance(value, Iterable) or isinstance(value, str): + raise TypeError(f"difference expected a list, got {type(value).__name__}") + if not isinstance(other, Iterable) or isinstance(other, str): + raise TypeError(f"difference expected a list, got {type(other).__name__}") + + return list(set(value) - set(other)) + + +def union(value: Iterable[Any], other: Iterable[Any]) -> list[Any]: + """Return all unique elements from both lists combined.""" + if not isinstance(value, Iterable) or isinstance(value, str): + raise TypeError(f"union expected a list, got {type(value).__name__}") + if not isinstance(other, Iterable) or isinstance(other, str): + raise TypeError(f"union expected a list, got {type(other).__name__}") + + return list(set(value) | set(other)) + + +def symmetric_difference(value: Iterable[Any], other: Iterable[Any]) -> list[Any]: + """Return elements that are in either list but not in both.""" + if not isinstance(value, Iterable) or isinstance(value, str): + raise TypeError( + f"symmetric_difference expected a list, got 
{type(value).__name__}" + ) + if not isinstance(other, Iterable) or isinstance(other, str): + raise TypeError( + f"symmetric_difference expected a list, got {type(other).__name__}" + ) + + return list(set(value) ^ set(other)) + + +def combine(*args: Any, recursive: bool = False) -> dict[Any, Any]: + """Combine multiple dictionaries into one.""" + if not args: + raise TypeError("combine expected at least 1 argument, got 0") + + result: dict[Any, Any] = {} + for arg in args: + if not isinstance(arg, dict): + raise TypeError(f"combine expected a dict, got {type(arg).__name__}") + + if recursive: + for key, value in arg.items(): + if ( + key in result + and isinstance(result[key], dict) + and isinstance(value, dict) + ): + result[key] = combine(result[key], value, recursive=True) + else: + result[key] = value + else: + result |= arg + + return result + + def md5(value: str) -> str: """Generate md5 hash from a string.""" return hashlib.md5(value.encode()).hexdigest() @@ -2957,114 +3027,127 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment): str | jinja2.nodes.Template, CodeType | None ] = weakref.WeakValueDictionary() self.add_extension("jinja2.ext.loopcontrols") - self.filters["round"] = forgiving_round - self.filters["multiply"] = multiply - self.filters["add"] = add - self.filters["log"] = logarithm - self.filters["sin"] = sine - self.filters["cos"] = cosine - self.filters["tan"] = tangent - self.filters["asin"] = arc_sine - self.filters["acos"] = arc_cosine - self.filters["atan"] = arc_tangent - self.filters["atan2"] = arc_tangent2 - self.filters["sqrt"] = square_root - self.filters["as_datetime"] = as_datetime - self.filters["as_timedelta"] = as_timedelta - self.filters["as_timestamp"] = forgiving_as_timestamp - self.filters["as_local"] = dt_util.as_local - self.filters["timestamp_custom"] = timestamp_custom - self.filters["timestamp_local"] = timestamp_local - self.filters["timestamp_utc"] = timestamp_utc - self.filters["to_json"] = to_json - self.filters["from_json"] = from_json - self.filters["is_defined"] = fail_when_undefined - self.filters["average"] = average - self.filters["median"] = median - self.filters["statistical_mode"] = statistical_mode - self.filters["random"] = random_every_time - self.filters["base64_encode"] = base64_encode - self.filters["base64_decode"] = base64_decode - self.filters["ordinal"] = ordinal - self.filters["regex_match"] = regex_match - self.filters["regex_replace"] = regex_replace - self.filters["regex_search"] = regex_search - self.filters["regex_findall"] = regex_findall - self.filters["regex_findall_index"] = regex_findall_index - self.filters["bitwise_and"] = bitwise_and - self.filters["bitwise_or"] = bitwise_or - self.filters["bitwise_xor"] = bitwise_xor - self.filters["pack"] = struct_pack - self.filters["unpack"] = struct_unpack - self.filters["ord"] = ord - self.filters["is_number"] = is_number - self.filters["float"] = forgiving_float_filter - self.filters["int"] = forgiving_int_filter - self.filters["slugify"] = slugify - self.filters["iif"] = iif - self.filters["bool"] = forgiving_boolean - self.filters["version"] = version - self.filters["contains"] = contains - self.filters["shuffle"] = shuffle - self.filters["typeof"] = typeof - self.filters["flatten"] = flatten - self.filters["md5"] = md5 - self.filters["sha1"] = sha1 - self.filters["sha256"] = sha256 - self.filters["sha512"] = sha512 - self.globals["log"] = logarithm - self.globals["sin"] = sine - self.globals["cos"] = cosine - self.globals["tan"] = tangent - self.globals["sqrt"] 
= square_root - self.globals["pi"] = math.pi - self.globals["tau"] = math.pi * 2 - self.globals["e"] = math.e - self.globals["asin"] = arc_sine + self.globals["acos"] = arc_cosine - self.globals["atan"] = arc_tangent - self.globals["atan2"] = arc_tangent2 - self.globals["float"] = forgiving_float self.globals["as_datetime"] = as_datetime self.globals["as_local"] = dt_util.as_local self.globals["as_timedelta"] = as_timedelta self.globals["as_timestamp"] = forgiving_as_timestamp - self.globals["timedelta"] = timedelta - self.globals["merge_response"] = merge_response - self.globals["strptime"] = strptime - self.globals["urlencode"] = urlencode + self.globals["asin"] = arc_sine + self.globals["atan"] = arc_tangent + self.globals["atan2"] = arc_tangent2 self.globals["average"] = average - self.globals["median"] = median - self.globals["statistical_mode"] = statistical_mode - self.globals["max"] = min_max_from_filter(self.filters["max"], "max") - self.globals["min"] = min_max_from_filter(self.filters["min"], "min") - self.globals["is_number"] = is_number - self.globals["set"] = _to_set - self.globals["tuple"] = _to_tuple - self.globals["int"] = forgiving_int - self.globals["pack"] = struct_pack - self.globals["unpack"] = struct_unpack - self.globals["slugify"] = slugify - self.globals["iif"] = iif self.globals["bool"] = forgiving_boolean - self.globals["version"] = version - self.globals["zip"] = zip - self.globals["shuffle"] = shuffle - self.globals["typeof"] = typeof + self.globals["combine"] = combine + self.globals["cos"] = cosine + self.globals["difference"] = difference + self.globals["e"] = math.e self.globals["flatten"] = flatten + self.globals["float"] = forgiving_float + self.globals["iif"] = iif + self.globals["int"] = forgiving_int + self.globals["intersect"] = intersect + self.globals["is_number"] = is_number + self.globals["log"] = logarithm + self.globals["max"] = min_max_from_filter(self.filters["max"], "max") self.globals["md5"] = md5 + self.globals["median"] = median + self.globals["merge_response"] = merge_response + self.globals["min"] = min_max_from_filter(self.filters["min"], "min") + self.globals["pack"] = struct_pack + self.globals["pi"] = math.pi + self.globals["set"] = _to_set self.globals["sha1"] = sha1 self.globals["sha256"] = sha256 self.globals["sha512"] = sha512 + self.globals["shuffle"] = shuffle + self.globals["sin"] = sine + self.globals["slugify"] = slugify + self.globals["sqrt"] = square_root + self.globals["statistical_mode"] = statistical_mode + self.globals["strptime"] = strptime + self.globals["symmetric_difference"] = symmetric_difference + self.globals["tan"] = tangent + self.globals["tau"] = math.pi * 2 + self.globals["timedelta"] = timedelta + self.globals["tuple"] = _to_tuple + self.globals["typeof"] = typeof + self.globals["union"] = union + self.globals["unpack"] = struct_unpack + self.globals["urlencode"] = urlencode + self.globals["version"] = version + self.globals["zip"] = zip + + self.filters["acos"] = arc_cosine + self.filters["add"] = add + self.filters["as_datetime"] = as_datetime + self.filters["as_local"] = dt_util.as_local + self.filters["as_timedelta"] = as_timedelta + self.filters["as_timestamp"] = forgiving_as_timestamp + self.filters["asin"] = arc_sine + self.filters["atan"] = arc_tangent + self.filters["atan2"] = arc_tangent2 + self.filters["average"] = average + self.filters["base64_decode"] = base64_decode + self.filters["base64_encode"] = base64_encode + self.filters["bitwise_and"] = bitwise_and + self.filters["bitwise_or"] = 
bitwise_or + self.filters["bitwise_xor"] = bitwise_xor + self.filters["bool"] = forgiving_boolean + self.filters["combine"] = combine + self.filters["contains"] = contains + self.filters["cos"] = cosine + self.filters["difference"] = difference + self.filters["flatten"] = flatten + self.filters["float"] = forgiving_float_filter + self.filters["from_json"] = from_json + self.filters["iif"] = iif + self.filters["int"] = forgiving_int_filter + self.filters["intersect"] = intersect + self.filters["is_defined"] = fail_when_undefined + self.filters["is_number"] = is_number + self.filters["log"] = logarithm + self.filters["md5"] = md5 + self.filters["median"] = median + self.filters["multiply"] = multiply + self.filters["ord"] = ord + self.filters["ordinal"] = ordinal + self.filters["pack"] = struct_pack + self.filters["random"] = random_every_time + self.filters["regex_findall_index"] = regex_findall_index + self.filters["regex_findall"] = regex_findall + self.filters["regex_match"] = regex_match + self.filters["regex_replace"] = regex_replace + self.filters["regex_search"] = regex_search + self.filters["round"] = forgiving_round + self.filters["sha1"] = sha1 + self.filters["sha256"] = sha256 + self.filters["sha512"] = sha512 + self.filters["shuffle"] = shuffle + self.filters["sin"] = sine + self.filters["slugify"] = slugify + self.filters["sqrt"] = square_root + self.filters["statistical_mode"] = statistical_mode + self.filters["symmetric_difference"] = symmetric_difference + self.filters["tan"] = tangent + self.filters["timestamp_custom"] = timestamp_custom + self.filters["timestamp_local"] = timestamp_local + self.filters["timestamp_utc"] = timestamp_utc + self.filters["to_json"] = to_json + self.filters["typeof"] = typeof + self.filters["union"] = union + self.filters["unpack"] = struct_unpack + self.filters["version"] = version + + self.tests["contains"] = contains + self.tests["datetime"] = _is_datetime self.tests["is_number"] = is_number self.tests["list"] = _is_list - self.tests["set"] = _is_set - self.tests["tuple"] = _is_tuple - self.tests["datetime"] = _is_datetime - self.tests["string_like"] = _is_string_like self.tests["match"] = regex_match self.tests["search"] = regex_search - self.tests["contains"] = contains + self.tests["set"] = _is_set + self.tests["string_like"] = _is_string_like + self.tests["tuple"] = _is_tuple if hass is None: return @@ -3091,28 +3174,7 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment): return jinja_context(wrapper) - self.globals["device_entities"] = hassfunction(device_entities) - self.filters["device_entities"] = self.globals["device_entities"] - - self.globals["device_attr"] = hassfunction(device_attr) - self.filters["device_attr"] = self.globals["device_attr"] - - self.globals["config_entry_attr"] = hassfunction(config_entry_attr) - self.filters["config_entry_attr"] = self.globals["config_entry_attr"] - - self.globals["is_device_attr"] = hassfunction(is_device_attr) - self.tests["is_device_attr"] = hassfunction(is_device_attr, pass_eval_context) - - self.globals["config_entry_id"] = hassfunction(config_entry_id) - self.filters["config_entry_id"] = self.globals["config_entry_id"] - - self.globals["device_id"] = hassfunction(device_id) - self.filters["device_id"] = self.globals["device_id"] - - self.globals["issues"] = hassfunction(issues) - - self.globals["issue"] = hassfunction(issue) - self.filters["issue"] = self.globals["issue"] + # Area extensions self.globals["areas"] = hassfunction(areas) @@ -3128,6 +3190,8 @@ class 
TemplateEnvironment(ImmutableSandboxedEnvironment): self.globals["area_devices"] = hassfunction(area_devices) self.filters["area_devices"] = self.globals["area_devices"] + # Floor extensions + self.globals["floors"] = hassfunction(floors) self.filters["floors"] = self.globals["floors"] @@ -3143,9 +3207,35 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment): self.globals["floor_entities"] = hassfunction(floor_entities) self.filters["floor_entities"] = self.globals["floor_entities"] + # Integration extensions + self.globals["integration_entities"] = hassfunction(integration_entities) self.filters["integration_entities"] = self.globals["integration_entities"] + # Config entry extensions + + self.globals["config_entry_attr"] = hassfunction(config_entry_attr) + self.filters["config_entry_attr"] = self.globals["config_entry_attr"] + + self.globals["config_entry_id"] = hassfunction(config_entry_id) + self.filters["config_entry_id"] = self.globals["config_entry_id"] + + # Device extensions + + self.globals["device_attr"] = hassfunction(device_attr) + self.filters["device_attr"] = self.globals["device_attr"] + + self.globals["device_entities"] = hassfunction(device_entities) + self.filters["device_entities"] = self.globals["device_entities"] + + self.globals["is_device_attr"] = hassfunction(is_device_attr) + self.tests["is_device_attr"] = hassfunction(is_device_attr, pass_eval_context) + + self.globals["device_id"] = hassfunction(device_id) + self.filters["device_id"] = self.globals["device_id"] + + # Label extensions + self.globals["labels"] = hassfunction(labels) self.filters["labels"] = self.globals["labels"] @@ -3164,6 +3254,12 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment): self.globals["label_entities"] = hassfunction(label_entities) self.filters["label_entities"] = self.globals["label_entities"] + # Issue extensions + + self.globals["issues"] = hassfunction(issues) + self.globals["issue"] = hassfunction(issue) + self.filters["issue"] = self.globals["issue"] + if limited: # Only device_entities is available to limited templates, mark other # functions and filters as unsupported. 
@@ -3176,38 +3272,38 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment): return warn_unsupported hass_globals = [ - "closest", - "distance", - "expand", - "is_hidden_entity", - "is_state", - "is_state_attr", - "state_attr", - "states", - "state_translated", - "has_value", - "utcnow", - "now", - "device_attr", - "is_device_attr", - "device_id", "area_id", "area_name", + "closest", + "device_attr", + "device_id", + "distance", + "expand", "floor_id", "floor_name", + "has_value", + "is_device_attr", + "is_hidden_entity", + "is_state_attr", + "is_state", + "label_id", + "label_name", + "now", "relative_time", + "state_attr", + "state_translated", + "states", "time_since", "time_until", "today_at", - "label_id", - "label_name", + "utcnow", ] hass_filters = [ - "closest", - "expand", - "device_id", "area_id", "area_name", + "closest", + "device_id", + "expand", "floor_id", "floor_name", "has_value", @@ -3217,8 +3313,8 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment): hass_tests = [ "has_value", "is_hidden_entity", - "is_state", "is_state_attr", + "is_state", ] for glob in hass_globals: self.globals[glob] = unsupported(glob) @@ -3228,38 +3324,46 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment): self.filters[test] = unsupported(test) return - self.globals["expand"] = hassfunction(expand) - self.filters["expand"] = self.globals["expand"] self.globals["closest"] = hassfunction(closest) - self.filters["closest"] = hassfunction(closest_filter) self.globals["distance"] = hassfunction(distance) + self.globals["expand"] = hassfunction(expand) + self.globals["has_value"] = hassfunction(has_value) + self.globals["now"] = hassfunction(now) + self.globals["relative_time"] = hassfunction(relative_time) + self.globals["time_since"] = hassfunction(time_since) + self.globals["time_until"] = hassfunction(time_until) + self.globals["today_at"] = hassfunction(today_at) + self.globals["utcnow"] = hassfunction(utcnow) + + self.filters["closest"] = hassfunction(closest_filter) + self.filters["expand"] = self.globals["expand"] + self.filters["has_value"] = self.globals["has_value"] + self.filters["relative_time"] = self.globals["relative_time"] + self.filters["time_since"] = self.globals["time_since"] + self.filters["time_until"] = self.globals["time_until"] + self.filters["today_at"] = self.globals["today_at"] + + self.tests["has_value"] = hassfunction(has_value, pass_eval_context) + + # Entity extensions + self.globals["is_hidden_entity"] = hassfunction(is_hidden_entity) self.tests["is_hidden_entity"] = hassfunction( is_hidden_entity, pass_eval_context ) - self.globals["is_state"] = hassfunction(is_state) - self.tests["is_state"] = hassfunction(is_state, pass_eval_context) + + # State extensions + self.globals["is_state_attr"] = hassfunction(is_state_attr) - self.tests["is_state_attr"] = hassfunction(is_state_attr, pass_eval_context) + self.globals["is_state"] = hassfunction(is_state) self.globals["state_attr"] = hassfunction(state_attr) - self.filters["state_attr"] = self.globals["state_attr"] - self.globals["states"] = AllStates(hass) - self.filters["states"] = self.globals["states"] self.globals["state_translated"] = StateTranslated(hass) + self.globals["states"] = AllStates(hass) + self.filters["state_attr"] = self.globals["state_attr"] self.filters["state_translated"] = self.globals["state_translated"] - self.globals["has_value"] = hassfunction(has_value) - self.filters["has_value"] = self.globals["has_value"] - self.tests["has_value"] = hassfunction(has_value, pass_eval_context) - 
self.globals["utcnow"] = hassfunction(utcnow) - self.globals["now"] = hassfunction(now) - self.globals["relative_time"] = hassfunction(relative_time) - self.filters["relative_time"] = self.globals["relative_time"] - self.globals["time_since"] = hassfunction(time_since) - self.filters["time_since"] = self.globals["time_since"] - self.globals["time_until"] = hassfunction(time_until) - self.filters["time_until"] = self.globals["time_until"] - self.globals["today_at"] = hassfunction(today_at) - self.filters["today_at"] = self.globals["today_at"] + self.filters["states"] = self.globals["states"] + self.tests["is_state_attr"] = hassfunction(is_state_attr, pass_eval_context) + self.tests["is_state"] = hassfunction(is_state, pass_eval_context) def is_safe_callable(self, obj): """Test if callback is safe.""" diff --git a/homeassistant/loader.py b/homeassistant/loader.py index 3bc33f8374c..20763dc7b30 100644 --- a/homeassistant/loader.py +++ b/homeassistant/loader.py @@ -40,6 +40,8 @@ from .generated.ssdp import SSDP from .generated.usb import USB from .generated.zeroconf import HOMEKIT, ZEROCONF from .helpers.json import json_bytes, json_fragment +from .helpers.typing import UNDEFINED, UndefinedType +from .util.async_ import create_eager_task from .util.hass_dict import HassKey from .util.json import JSON_DECODE_EXCEPTIONS, json_loads @@ -758,10 +760,8 @@ class Integration: manifest["overwrites_built_in"] = self.overwrites_built_in if self.dependencies: - self._all_dependencies_resolved: bool | None = None - self._all_dependencies: set[str] | None = None + self._all_dependencies: set[str] | Exception | None = None else: - self._all_dependencies_resolved = True self._all_dependencies = set() self._platforms_to_preload = hass.data[DATA_PRELOAD_PLATFORMS] @@ -933,47 +933,25 @@ class Integration: """Return all dependencies including sub-dependencies.""" if self._all_dependencies is None: raise RuntimeError("Dependencies not resolved!") + if isinstance(self._all_dependencies, Exception): + raise self._all_dependencies return self._all_dependencies @property def all_dependencies_resolved(self) -> bool: """Return if all dependencies have been resolved.""" - return self._all_dependencies_resolved is not None + return self._all_dependencies is not None - async def resolve_dependencies(self) -> bool: + async def resolve_dependencies(self) -> set[str] | None: """Resolve all dependencies.""" - if self._all_dependencies_resolved is not None: - return self._all_dependencies_resolved + if self._all_dependencies is not None: + if isinstance(self._all_dependencies, Exception): + return None + return self._all_dependencies - self._all_dependencies_resolved = False - try: - dependencies = await _async_component_dependencies(self.hass, self) - except IntegrationNotFound as err: - _LOGGER.error( - ( - "Unable to resolve dependencies for %s: unable to resolve" - " (sub)dependency %s" - ), - self.domain, - err.domain, - ) - except CircularDependency as err: - _LOGGER.error( - ( - "Unable to resolve dependencies for %s: it contains a circular" - " dependency: %s -> %s" - ), - self.domain, - err.from_domain, - err.to_domain, - ) - else: - dependencies.discard(self.domain) - self._all_dependencies = dependencies - self._all_dependencies_resolved = True - - return self._all_dependencies_resolved + result = await resolve_integrations_dependencies(self.hass, (self,)) + return result.get(self.domain) async def async_get_component(self) -> ComponentProtocol: """Return the component. 
@@ -1441,6 +1419,189 @@ async def async_get_integrations( return results +class _ResolveDependenciesCacheProtocol(Protocol): + def get(self, itg: Integration) -> set[str] | Exception | None: ... + + def __setitem__( + self, itg: Integration, all_dependencies: set[str] | Exception + ) -> None: ... + + +class _ResolveDependenciesCache(_ResolveDependenciesCacheProtocol): + """Cache for resolve_integrations_dependencies.""" + + def get(self, itg: Integration) -> set[str] | Exception | None: + return itg._all_dependencies # noqa: SLF001 + + def __setitem__( + self, itg: Integration, all_dependencies: set[str] | Exception + ) -> None: + itg._all_dependencies = all_dependencies # noqa: SLF001 + + +async def resolve_integrations_dependencies( + hass: HomeAssistant, integrations: Iterable[Integration] +) -> dict[str, set[str]]: + """Resolve all dependencies for integrations. + + Detects circular dependencies and missing integrations. + """ + resolved = _ResolveDependenciesCache() + + async def _resolve_deps_catch_exceptions(itg: Integration) -> set[str] | None: + try: + return await _do_resolve_dependencies(itg, cache=resolved) + except Exception as exc: # noqa: BLE001 + _LOGGER.error("Unable to resolve dependencies for %s: %s", itg.domain, exc) + return None + + resolve_dependencies_tasks = { + itg.domain: create_eager_task( + _resolve_deps_catch_exceptions(itg), + name=f"resolve dependencies {itg.domain}", + loop=hass.loop, + ) + for itg in integrations + } + + result = await asyncio.gather(*resolve_dependencies_tasks.values()) + + return { + domain: deps + for domain, deps in zip(resolve_dependencies_tasks, result, strict=True) + if deps is not None + } + + +async def resolve_integrations_after_dependencies( + hass: HomeAssistant, + integrations: Iterable[Integration], + possible_after_dependencies: set[str] | None = None, + *, + ignore_exceptions: bool = False, +) -> dict[str, set[str]]: + """Resolve all dependencies, including after_dependencies, for integrations. + + Detects circular dependencies and missing integrations. + """ + resolved: dict[Integration, set[str] | Exception] = {} + + async def _resolve_deps_catch_exceptions(itg: Integration) -> set[str] | None: + try: + return await _do_resolve_dependencies( + itg, + cache=resolved, + possible_after_dependencies=possible_after_dependencies, + ignore_exceptions=ignore_exceptions, + ) + except Exception as exc: # noqa: BLE001 + _LOGGER.error( + "Unable to resolve (after) dependencies for %s: %s", itg.domain, exc + ) + return None + + resolve_dependencies_tasks = { + itg.domain: create_eager_task( + _resolve_deps_catch_exceptions(itg), + name=f"resolve after dependencies {itg.domain}", + loop=hass.loop, + ) + for itg in integrations + } + + result = await asyncio.gather(*resolve_dependencies_tasks.values()) + + return { + domain: deps + for domain, deps in zip(resolve_dependencies_tasks, result, strict=True) + if deps is not None + } + + +async def _do_resolve_dependencies( + itg: Integration, + *, + cache: _ResolveDependenciesCacheProtocol, + possible_after_dependencies: set[str] | None | UndefinedType = UNDEFINED, + ignore_exceptions: bool = False, +) -> set[str]: + """Recursively resolve all dependencies. + + Uses `cache` to cache the results. + + If `possible_after_dependencies` is not UNDEFINED, + listed after dependencies are also considered. + If `possible_after_dependencies` is None, + all the possible after dependencies are considered. 
+ + If `ignore_exceptions` is True, exceptions are caught and ignored + and the normal resolution algorithm continues. + Otherwise, exceptions are raised. + """ + resolved = cache + resolving: set[str] = set() + + async def do_resolve_dependencies_impl(itg: Integration) -> set[str]: + domain = itg.domain + + # If it's already resolved, no point doing it again. + if (result := resolved.get(itg)) is not None: + if isinstance(result, Exception): + raise result + return result + + # If we are already resolving it, we have a circular dependency. + if domain in resolving: + if ignore_exceptions: + resolved[itg] = set() + return set() + exc = CircularDependency([domain]) + resolved[itg] = exc + raise exc + + resolving.add(domain) + + dependencies_domains = set(itg.dependencies) + if possible_after_dependencies is not UNDEFINED: + if possible_after_dependencies is None: + after_dependencies: Iterable[str] = itg.after_dependencies + else: + after_dependencies = ( + set(itg.after_dependencies) & possible_after_dependencies + ) + dependencies_domains.update(after_dependencies) + dependencies = await async_get_integrations(itg.hass, dependencies_domains) + + all_dependencies: set[str] = set() + for dep_domain, dep_integration in dependencies.items(): + if isinstance(dep_integration, Exception): + if ignore_exceptions: + continue + resolved[itg] = dep_integration + raise dep_integration + + all_dependencies.add(dep_domain) + + try: + dep_dependencies = await do_resolve_dependencies_impl(dep_integration) + except CircularDependency as exc: + exc.extend_cycle(domain) + resolved[itg] = exc + raise + except Exception as exc: + resolved[itg] = exc + raise + + all_dependencies.update(dep_dependencies) + + resolving.remove(domain) + + resolved[itg] = all_dependencies + return all_dependencies + + return await do_resolve_dependencies_impl(itg) + + class LoaderError(Exception): """Loader base error.""" @@ -1466,11 +1627,13 @@ class IntegrationNotLoaded(LoaderError): class CircularDependency(LoaderError): """Raised when a circular dependency is found when resolving components.""" - def __init__(self, from_domain: str | set[str], to_domain: str) -> None: + def __init__(self, domain_cycle: list[str]) -> None: """Initialize circular dependency error.""" - super().__init__(f"Circular dependency detected: {from_domain} -> {to_domain}.") - self.from_domain = from_domain - self.to_domain = to_domain + super().__init__("Circular dependency detected", domain_cycle) + + def extend_cycle(self, domain: str) -> None: + """Extend the cycle with the domain.""" + self.args[1].insert(0, domain) def _load_file( @@ -1624,50 +1787,6 @@ def bind_hass[_CallableT: Callable[..., Any]](func: _CallableT) -> _CallableT: return func -async def _async_component_dependencies( - hass: HomeAssistant, - integration: Integration, -) -> set[str]: - """Get component dependencies.""" - loading: set[str] = set() - loaded: set[str] = set() - - async def component_dependencies_impl(integration: Integration) -> None: - """Recursively get component dependencies.""" - domain = integration.domain - if not (dependencies := integration.dependencies): - loaded.add(domain) - return - - loading.add(domain) - dep_integrations = await async_get_integrations(hass, dependencies) - for dependency_domain, dep_integration in dep_integrations.items(): - if isinstance(dep_integration, Exception): - raise dep_integration - - # If we are already loading it, we have a circular dependency. 
- # We have to check it here to make sure that every integration that - # depends on us, does not appear in our own after_dependencies. - if conflict := loading.intersection(dep_integration.after_dependencies): - raise CircularDependency(conflict, dependency_domain) - - # If we have already loaded it, no point doing it again. - if dependency_domain in loaded: - continue - - # If we are already loading it, we have a circular dependency. - if dependency_domain in loading: - raise CircularDependency(dependency_domain, domain) - - await component_dependencies_impl(dep_integration) - loading.remove(domain) - loaded.add(domain) - - await component_dependencies_impl(integration) - - return loaded - - def _async_mount_config_dir(hass: HomeAssistant) -> None: """Mount config dir in order to load custom_component. diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index bce7a2ddcdd..d1e91fd8604 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -6,10 +6,11 @@ aiodns==3.2.0 aiohasupervisor==0.3.0 aiohttp-asyncmdnsresolver==0.1.1 aiohttp-fast-zlib==0.2.3 -aiohttp==3.11.13 +aiohttp==3.11.14 aiohttp_cors==0.7.0 aiousbwatcher==1.1.1 aiozoneinfo==0.2.3 +annotatedyaml==0.4.5 astral==2.2 async-interrupt==1.2.2 async-upnp-client==0.43.0 @@ -22,29 +23,29 @@ bcrypt==4.2.0 bleak-retry-connector==3.9.0 bleak==0.22.3 bluetooth-adapters==0.21.4 -bluetooth-auto-recovery==1.4.4 -bluetooth-data-tools==1.25.0 +bluetooth-auto-recovery==1.4.5 +bluetooth-data-tools==1.26.1 cached-ipaddress==0.10.0 certifi>=2021.5.30 ciso8601==2.3.2 cronsim==2.6 cryptography==44.0.1 -dbus-fast==2.39.3 +dbus-fast==2.43.0 fnv-hash-fast==1.4.0 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 -habluetooth==3.25.0 +habluetooth==3.37.0 hass-nabucasa==0.94.0 hassil==2.2.3 home-assistant-bluetooth==1.13.1 -home-assistant-frontend==20250306.0 -home-assistant-intents==2025.3.5 +home-assistant-frontend==20250326.0 +home-assistant-intents==2025.3.24 httpx==0.28.1 ifaddr==0.2.0 Jinja2==3.1.6 lru-dict==1.3.0 mutagen==1.47.0 -orjson==3.10.15 +orjson==3.10.16 packaging>=23.1 paho-mqtt==2.1.0 Pillow==11.1.0 @@ -61,13 +62,13 @@ PyTurboJPEG==1.7.5 PyYAML==6.0.2 requests==2.32.3 securetar==2025.2.1 -SQLAlchemy==2.0.38 +SQLAlchemy==2.0.39 standard-aifc==3.13.0 standard-telnetlib==3.13.0 -typing-extensions>=4.12.2,<5.0 +typing-extensions>=4.13.0,<5.0 ulid-transform==1.4.0 urllib3>=1.26.5,<2 -uv==0.6.1 +uv==0.6.10 voluptuous-openapi==0.0.6 voluptuous-serialize==2.6.0 voluptuous==0.15.2 @@ -108,7 +109,7 @@ uuid==1000000000.0.0 # these requirements are quite loose. As the entire stack has some outstanding issues, and # even newer versions seem to introduce new issues, it's useful for us to pin all these # requirements so we can directly link HA versions to these library versions. 
-anyio==4.8.0 +anyio==4.9.0 h11==0.14.0 httpcore==1.0.7 diff --git a/homeassistant/scripts/check_config.py b/homeassistant/scripts/check_config.py index a24568e9a6f..ca3df5080b5 100644 --- a/homeassistant/scripts/check_config.py +++ b/homeassistant/scripts/check_config.py @@ -12,6 +12,9 @@ import os from typing import Any from unittest.mock import patch +from annotatedyaml import loader as yaml_loader +from annotatedyaml.loader import Secrets + from homeassistant import core, loader from homeassistant.config import get_default_config_dir from homeassistant.config_entries import ConfigEntries @@ -23,7 +26,6 @@ from homeassistant.helpers import ( issue_registry as ir, ) from homeassistant.helpers.check_config import async_check_ha_config_file -from homeassistant.util.yaml import Secrets, loader as yaml_loader # mypy: allow-untyped-calls, allow-untyped-defs @@ -31,9 +33,9 @@ REQUIREMENTS = ("colorlog==6.8.2",) _LOGGER = logging.getLogger(__name__) MOCKS: dict[str, tuple[str, Callable]] = { - "load": ("homeassistant.util.yaml.loader.load_yaml", yaml_loader.load_yaml), + "load": ("annotatedyaml.loader.load_yaml", yaml_loader.load_yaml), "load*": ("homeassistant.config.load_yaml_dict", yaml_loader.load_yaml_dict), - "secrets": ("homeassistant.util.yaml.loader.secret_yaml", yaml_loader.secret_yaml), + "secrets": ("annotatedyaml.loader.secret_yaml", yaml_loader.secret_yaml), } PATCHES: dict[str, Any] = {} diff --git a/homeassistant/setup.py b/homeassistant/setup.py index dc4d0988b91..9572136559a 100644 --- a/homeassistant/setup.py +++ b/homeassistant/setup.py @@ -323,7 +323,7 @@ async def _async_setup_component( translation.async_load_integrations(hass, integration_set), loop=hass.loop ) # Validate all dependencies exist and there are no circular dependencies - if not await integration.resolve_dependencies(): + if await integration.resolve_dependencies() is None: return False # Process requirements as soon as possible, so we can import the component diff --git a/homeassistant/util/frozen_dataclass_compat.py b/homeassistant/util/frozen_dataclass_compat.py index 81ce9961a0b..518515d4f85 100644 --- a/homeassistant/util/frozen_dataclass_compat.py +++ b/homeassistant/util/frozen_dataclass_compat.py @@ -63,7 +63,7 @@ class FrozenOrThawed(type): ) def __new__( - mcs, # noqa: N804 ruff bug, ruff does not understand this is a metaclass + mcs, name: str, bases: tuple[type, ...], namespace: dict[Any, Any], diff --git a/homeassistant/util/logging.py b/homeassistant/util/logging.py index 2c4eb744614..1e516742bfe 100644 --- a/homeassistant/util/logging.py +++ b/homeassistant/util/logging.py @@ -2,14 +2,16 @@ from __future__ import annotations +from collections import defaultdict from collections.abc import Callable, Coroutine from functools import partial, wraps import inspect import logging import logging.handlers -import queue +from queue import SimpleQueue +import time import traceback -from typing import Any, cast, overload +from typing import Any, cast, overload, override from homeassistant.core import ( HassJobType, @@ -18,6 +20,59 @@ from homeassistant.core import ( get_hassjob_callable_job_type, ) +_LOGGER = logging.getLogger(__name__) + + +class HomeAssistantQueueListener(logging.handlers.QueueListener): + """Custom QueueListener to watch for noisy loggers.""" + + LOG_COUNTS_RESET_INTERVAL = 300 + MAX_LOGS_COUNT = 200 + + _last_reset: float + _log_counts: dict[str, int] + _warned_modules: set[str] + + def __init__( + self, queue: SimpleQueue[logging.Handler], *handlers: logging.Handler + ) -> None: + 
"""Initialize the handler.""" + super().__init__(queue, *handlers) + self._warned_modules = set() + self._reset_counters(time.time()) + + @override + def handle(self, record: logging.LogRecord) -> None: + """Handle the record.""" + super().handle(record) + + if record.levelno < logging.INFO: + return + + if (record.created - self._last_reset) > self.LOG_COUNTS_RESET_INTERVAL: + self._reset_counters(record.created) + + module_name = record.name + if module_name == __name__ or module_name in self._warned_modules: + return + + self._log_counts[module_name] += 1 + module_count = self._log_counts[module_name] + if module_count < self.MAX_LOGS_COUNT: + return + + _LOGGER.warning( + "Module %s is logging too frequently. %d messages since last count", + module_name, + module_count, + ) + self._warned_modules.add(module_name) + + def _reset_counters(self, time_sec: float) -> None: + _LOGGER.debug("Resetting log counters") + self._last_reset = time_sec + self._log_counts = defaultdict(int) + class HomeAssistantQueueHandler(logging.handlers.QueueHandler): """Process the log in another thread.""" @@ -60,7 +115,7 @@ def async_activate_log_queue_handler(hass: HomeAssistant) -> None: This allows us to avoid blocking I/O and formatting messages in the event loop as log messages are written in another thread. """ - simple_queue: queue.SimpleQueue[logging.Handler] = queue.SimpleQueue() + simple_queue: SimpleQueue[logging.Handler] = SimpleQueue() queue_handler = HomeAssistantQueueHandler(simple_queue) logging.root.addHandler(queue_handler) @@ -71,7 +126,7 @@ def async_activate_log_queue_handler(hass: HomeAssistant) -> None: logging.root.removeHandler(handler) migrated_handlers.append(handler) - listener = logging.handlers.QueueListener(simple_queue, *migrated_handlers) + listener = HomeAssistantQueueListener(simple_queue, *migrated_handlers) queue_handler.listener = listener listener.start() diff --git a/homeassistant/util/unit_system.py b/homeassistant/util/unit_system.py index 15993cbae47..055f435503f 100644 --- a/homeassistant/util/unit_system.py +++ b/homeassistant/util/unit_system.py @@ -2,6 +2,7 @@ from __future__ import annotations +from dataclasses import dataclass from numbers import Number from typing import TYPE_CHECKING, Final @@ -82,9 +83,21 @@ def _is_valid_unit(unit: str, unit_type: str) -> bool: return False +@dataclass(frozen=True, kw_only=True) class UnitSystem: """A container for units of measure.""" + _name: str + accumulated_precipitation_unit: UnitOfPrecipitationDepth + area_unit: UnitOfArea + length_unit: UnitOfLength + mass_unit: UnitOfMass + pressure_unit: UnitOfPressure + temperature_unit: UnitOfTemperature + volume_unit: UnitOfVolume + wind_speed_unit: UnitOfSpeed + _conversions: dict[tuple[SensorDeviceClass | str | None, str | None], str] + def __init__( self, name: str, @@ -118,16 +131,16 @@ class UnitSystem: if errors: raise ValueError(errors) - self._name = name - self.accumulated_precipitation_unit = accumulated_precipitation - self.area_unit = area - self.length_unit = length - self.mass_unit = mass - self.pressure_unit = pressure - self.temperature_unit = temperature - self.volume_unit = volume - self.wind_speed_unit = wind_speed - self._conversions = conversions + super().__setattr__("_name", name) + super().__setattr__("accumulated_precipitation_unit", accumulated_precipitation) + super().__setattr__("area_unit", area) + super().__setattr__("length_unit", length) + super().__setattr__("mass_unit", mass) + super().__setattr__("pressure_unit", pressure) + 
super().__setattr__("temperature_unit", temperature) + super().__setattr__("volume_unit", volume) + super().__setattr__("wind_speed_unit", wind_speed) + super().__setattr__("_conversions", conversions) def temperature(self, temperature: float, from_unit: str) -> float: """Convert the given temperature to this unit system.""" diff --git a/homeassistant/util/yaml/__init__.py b/homeassistant/util/yaml/__init__.py index 3b1f5c4cc0a..323383ef53f 100644 --- a/homeassistant/util/yaml/__init__.py +++ b/homeassistant/util/yaml/__init__.py @@ -1,17 +1,10 @@ """YAML utility functions.""" -from .const import SECRET_YAML +from annotatedyaml import SECRET_YAML, Input, YamlTypeError +from annotatedyaml.input import UndefinedSubstitution, extract_inputs, substitute + from .dumper import dump, save_yaml -from .input import UndefinedSubstitution, extract_inputs, substitute -from .loader import ( - Secrets, - YamlTypeError, - load_yaml, - load_yaml_dict, - parse_yaml, - secret_yaml, -) -from .objects import Input +from .loader import Secrets, load_yaml, load_yaml_dict, parse_yaml, secret_yaml __all__ = [ "SECRET_YAML", diff --git a/homeassistant/util/yaml/const.py b/homeassistant/util/yaml/const.py deleted file mode 100644 index 811c7d149f7..00000000000 --- a/homeassistant/util/yaml/const.py +++ /dev/null @@ -1,3 +0,0 @@ -"""Constants.""" - -SECRET_YAML = "secrets.yaml" diff --git a/homeassistant/util/yaml/dumper.py b/homeassistant/util/yaml/dumper.py index 61772b6989d..059be2c1c5b 100644 --- a/homeassistant/util/yaml/dumper.py +++ b/homeassistant/util/yaml/dumper.py @@ -1,96 +1,5 @@ """Custom dumper and representers.""" -from collections import OrderedDict -from typing import Any +from annotatedyaml.dumper import add_representer, dump, represent_odict, save_yaml -import yaml - -from .objects import Input, NodeDictClass, NodeListClass, NodeStrClass - -# mypy: allow-untyped-calls, no-warn-return-any - - -try: - from yaml import CSafeDumper as FastestAvailableSafeDumper -except ImportError: - from yaml import ( # type: ignore[assignment] - SafeDumper as FastestAvailableSafeDumper, - ) - - -def dump(_dict: dict | list) -> str: - """Dump YAML to a string and remove null.""" - return yaml.dump( - _dict, - default_flow_style=False, - allow_unicode=True, - sort_keys=False, - Dumper=FastestAvailableSafeDumper, - ).replace(": null\n", ":\n") - - -def save_yaml(path: str, data: dict) -> None: - """Save YAML to a file.""" - # Dump before writing to not truncate the file if dumping fails - str_data = dump(data) - with open(path, "w", encoding="utf-8") as outfile: - outfile.write(str_data) - - -# From: https://gist.github.com/miracle2k/3184458 -def represent_odict( # type: ignore[no-untyped-def] - dumper, tag, mapping, flow_style=None -) -> yaml.MappingNode: - """Like BaseRepresenter.represent_mapping but does not issue the sort().""" - value: list = [] - node = yaml.MappingNode(tag, value, flow_style=flow_style) - if dumper.alias_key is not None: - dumper.represented_objects[dumper.alias_key] = node - best_style = True - if hasattr(mapping, "items"): - mapping = mapping.items() - for item_key, item_value in mapping: - node_key = dumper.represent_data(item_key) - node_value = dumper.represent_data(item_value) - if not (isinstance(node_key, yaml.ScalarNode) and not node_key.style): - best_style = False - if not (isinstance(node_value, yaml.ScalarNode) and not node_value.style): - best_style = False - value.append((node_key, node_value)) - if flow_style is None: - if dumper.default_flow_style is not None: - node.flow_style 
= dumper.default_flow_style - else: - node.flow_style = best_style - return node - - -def add_representer(klass: Any, representer: Any) -> None: - """Add to representer to the dumper.""" - FastestAvailableSafeDumper.add_representer(klass, representer) - - -add_representer( - OrderedDict, - lambda dumper, value: represent_odict(dumper, "tag:yaml.org,2002:map", value), -) - -add_representer( - NodeDictClass, - lambda dumper, value: represent_odict(dumper, "tag:yaml.org,2002:map", value), -) - -add_representer( - NodeListClass, - lambda dumper, value: dumper.represent_sequence("tag:yaml.org,2002:seq", value), -) - -add_representer( - NodeStrClass, - lambda dumper, value: dumper.represent_scalar("tag:yaml.org,2002:str", str(value)), -) - -add_representer( - Input, - lambda dumper, value: dumper.represent_scalar("!input", value.name), -) +__all__ = ["add_representer", "dump", "represent_odict", "save_yaml"] diff --git a/homeassistant/util/yaml/input.py b/homeassistant/util/yaml/input.py index ff9b37f18f1..5dad8a63ae5 100644 --- a/homeassistant/util/yaml/input.py +++ b/homeassistant/util/yaml/input.py @@ -2,55 +2,8 @@ from __future__ import annotations -from typing import Any +from annotatedyaml.input import UndefinedSubstitution, extract_inputs, substitute from .objects import Input - -class UndefinedSubstitution(Exception): - """Error raised when we find a substitution that is not defined.""" - - def __init__(self, input_name: str) -> None: - """Initialize the undefined substitution exception.""" - super().__init__(f"No substitution found for input {input_name}") - self.input = input - - -def extract_inputs(obj: Any) -> set[str]: - """Extract input from a structure.""" - found: set[str] = set() - _extract_inputs(obj, found) - return found - - -def _extract_inputs(obj: Any, found: set[str]) -> None: - """Extract input from a structure.""" - if isinstance(obj, Input): - found.add(obj.name) - return - - if isinstance(obj, list): - for val in obj: - _extract_inputs(val, found) - return - - if isinstance(obj, dict): - for val in obj.values(): - _extract_inputs(val, found) - return - - -def substitute(obj: Any, substitutions: dict[str, Any]) -> Any: - """Substitute values.""" - if isinstance(obj, Input): - if obj.name not in substitutions: - raise UndefinedSubstitution(obj.name) - return substitutions[obj.name] - - if isinstance(obj, list): - return [substitute(val, substitutions) for val in obj] - - if isinstance(obj, dict): - return {key: substitute(val, substitutions) for key, val in obj.items()} - - return obj +__all__ = ["Input", "UndefinedSubstitution", "extract_inputs", "substitute"] diff --git a/homeassistant/util/yaml/loader.py b/homeassistant/util/yaml/loader.py index 3911d62040b..1f8338a1ff7 100644 --- a/homeassistant/util/yaml/loader.py +++ b/homeassistant/util/yaml/loader.py @@ -2,157 +2,37 @@ from __future__ import annotations -from collections.abc import Callable, Iterator -import fnmatch -from io import StringIO, TextIOWrapper -import logging +from io import StringIO import os -from pathlib import Path -from typing import Any, TextIO, overload +from typing import TextIO +from annotatedyaml import YAMLException, YamlTypeError +from annotatedyaml.loader import ( + HAS_C_LOADER, + JSON_TYPE, + LoaderType, + Secrets, + add_constructor, + load_yaml as load_annotated_yaml, + load_yaml_dict as load_annotated_yaml_dict, + parse_yaml as parse_annotated_yaml, + secret_yaml as annotated_secret_yaml, +) import yaml -try: - from yaml import CSafeLoader as FastestAvailableSafeLoader - - 
HAS_C_LOADER = True -except ImportError: - HAS_C_LOADER = False - from yaml import ( # type: ignore[assignment] - SafeLoader as FastestAvailableSafeLoader, - ) - -from propcache.api import cached_property - from homeassistant.exceptions import HomeAssistantError -from .const import SECRET_YAML -from .objects import Input, NodeDictClass, NodeListClass, NodeStrClass - -# mypy: allow-untyped-calls, no-warn-return-any - -JSON_TYPE = list | dict | str - -_LOGGER = logging.getLogger(__name__) - - -class YamlTypeError(HomeAssistantError): - """Raised by load_yaml_dict if top level data is not a dict.""" - - -class Secrets: - """Store secrets while loading YAML.""" - - def __init__(self, config_dir: Path) -> None: - """Initialize secrets.""" - self.config_dir = config_dir - self._cache: dict[Path, dict[str, str]] = {} - - def get(self, requester_path: str, secret: str) -> str: - """Return the value of a secret.""" - current_path = Path(requester_path) - - secret_dir = current_path - while True: - secret_dir = secret_dir.parent - - try: - secret_dir.relative_to(self.config_dir) - except ValueError: - # We went above the config dir - break - - secrets = self._load_secret_yaml(secret_dir) - - if secret in secrets: - _LOGGER.debug( - "Secret %s retrieved from secrets.yaml in folder %s", - secret, - secret_dir, - ) - return secrets[secret] - - raise HomeAssistantError(f"Secret {secret} not defined") - - def _load_secret_yaml(self, secret_dir: Path) -> dict[str, str]: - """Load the secrets yaml from path.""" - if (secret_path := secret_dir / SECRET_YAML) in self._cache: - return self._cache[secret_path] - - _LOGGER.debug("Loading %s", secret_path) - try: - secrets = load_yaml(str(secret_path)) - - if not isinstance(secrets, dict): - raise HomeAssistantError("Secrets is not a dictionary") - - if "logger" in secrets: - logger = str(secrets["logger"]).lower() - if logger == "debug": - _LOGGER.setLevel(logging.DEBUG) - else: - _LOGGER.error( - ( - "Error in secrets.yaml: 'logger: debug' expected, but" - " 'logger: %s' found" - ), - logger, - ) - del secrets["logger"] - except FileNotFoundError: - secrets = {} - - self._cache[secret_path] = secrets - - return secrets - - -class _LoaderMixin: - """Mixin class with extensions for YAML loader.""" - - name: str - stream: Any - - @cached_property - def get_name(self) -> str: - """Get the name of the loader.""" - return self.name - - @cached_property - def get_stream_name(self) -> str: - """Get the name of the stream.""" - return getattr(self.stream, "name", "") - - -class FastSafeLoader(FastestAvailableSafeLoader, _LoaderMixin): - """The fastest available safe loader, either C or Python.""" - - def __init__(self, stream: Any, secrets: Secrets | None = None) -> None: - """Initialize a safe line loader.""" - self.stream = stream - - # Set name in same way as the Python loader does in yaml.reader.__init__ - if isinstance(stream, str): - self.name = "" - elif isinstance(stream, bytes): - self.name = "" - else: - self.name = getattr(stream, "name", "") - - super().__init__(stream) - self.secrets = secrets - - -class PythonSafeLoader(yaml.SafeLoader, _LoaderMixin): - """Python safe loader.""" - - def __init__(self, stream: Any, secrets: Secrets | None = None) -> None: - """Initialize a safe line loader.""" - super().__init__(stream) - self.secrets = secrets - - -type LoaderType = FastSafeLoader | PythonSafeLoader +__all__ = [ + "HAS_C_LOADER", + "JSON_TYPE", + "Secrets", + "YamlTypeError", + "add_constructor", + "load_yaml", + "load_yaml_dict", + "parse_yaml", + 
"secret_yaml", +] def load_yaml( @@ -164,15 +44,9 @@ def load_yaml( except for FileNotFoundError which will be re-raised. """ try: - with open(fname, encoding="utf-8") as conf_file: - return parse_yaml(conf_file, secrets) - except UnicodeDecodeError as exc: - _LOGGER.error("Unable to read file %s: %s", fname, exc) - raise HomeAssistantError(exc) from exc - except FileNotFoundError: - raise - except OSError as exc: - raise HomeAssistantError(exc) from exc + return load_annotated_yaml(fname, secrets) + except YAMLException as exc: + raise HomeAssistantError(str(exc)) from exc def load_yaml_dict( @@ -183,320 +57,27 @@ def load_yaml_dict( Raise if the top level is not a dict. Return an empty dict if the file is empty. """ - loaded_yaml = load_yaml(fname, secrets) - if loaded_yaml is None: - loaded_yaml = {} - if not isinstance(loaded_yaml, dict): - raise YamlTypeError(f"YAML file {fname} does not contain a dict") - return loaded_yaml + try: + return load_annotated_yaml_dict(fname, secrets) + except YamlTypeError: + raise + except YAMLException as exc: + raise HomeAssistantError(str(exc)) from exc def parse_yaml( content: str | TextIO | StringIO, secrets: Secrets | None = None ) -> JSON_TYPE: """Parse YAML with the fastest available loader.""" - if not HAS_C_LOADER: - return _parse_yaml_python(content, secrets) try: - return _parse_yaml(FastSafeLoader, content, secrets) - except yaml.YAMLError: - # Loading failed, so we now load with the Python loader which has more - # readable exceptions - if isinstance(content, (StringIO, TextIO, TextIOWrapper)): - # Rewind the stream so we can try again - content.seek(0, 0) - return _parse_yaml_python(content, secrets) - - -def _parse_yaml_python( - content: str | TextIO | StringIO, secrets: Secrets | None = None -) -> JSON_TYPE: - """Parse YAML with the python loader (this is very slow).""" - try: - return _parse_yaml(PythonSafeLoader, content, secrets) - except yaml.YAMLError as exc: - _LOGGER.error(str(exc)) - raise HomeAssistantError(exc) from exc - - -def _parse_yaml( - loader: type[FastSafeLoader | PythonSafeLoader], - content: str | TextIO, - secrets: Secrets | None = None, -) -> JSON_TYPE: - """Load a YAML file.""" - return yaml.load(content, Loader=lambda stream: loader(stream, secrets)) # type: ignore[arg-type] - - -@overload -def _add_reference( - obj: list | NodeListClass, loader: LoaderType, node: yaml.nodes.Node -) -> NodeListClass: ... - - -@overload -def _add_reference( - obj: str | NodeStrClass, loader: LoaderType, node: yaml.nodes.Node -) -> NodeStrClass: ... - - -@overload -def _add_reference( - obj: dict | NodeDictClass, loader: LoaderType, node: yaml.nodes.Node -) -> NodeDictClass: ... - - -def _add_reference( - obj: dict | list | str | NodeDictClass | NodeListClass | NodeStrClass, - loader: LoaderType, - node: yaml.nodes.Node, -) -> NodeDictClass | NodeListClass | NodeStrClass: - """Add file reference information to an object.""" - if isinstance(obj, list): - obj = NodeListClass(obj) - elif isinstance(obj, str): - obj = NodeStrClass(obj) - elif isinstance(obj, dict): - obj = NodeDictClass(obj) - return _add_reference_to_node_class(obj, loader, node) - - -@overload -def _add_reference_to_node_class( - obj: NodeListClass, loader: LoaderType, node: yaml.nodes.Node -) -> NodeListClass: ... - - -@overload -def _add_reference_to_node_class( - obj: NodeStrClass, loader: LoaderType, node: yaml.nodes.Node -) -> NodeStrClass: ... 
- - -@overload -def _add_reference_to_node_class( - obj: NodeDictClass, loader: LoaderType, node: yaml.nodes.Node -) -> NodeDictClass: ... - - -def _add_reference_to_node_class( - obj: NodeDictClass | NodeListClass | NodeStrClass, - loader: LoaderType, - node: yaml.nodes.Node, -) -> NodeDictClass | NodeListClass | NodeStrClass: - """Add file reference information to a node class object.""" - try: # suppress is much slower - obj.__config_file__ = loader.get_name - obj.__line__ = node.start_mark.line + 1 - except AttributeError: - pass - return obj - - -def _raise_if_no_value[NodeT: yaml.nodes.Node, _R]( - func: Callable[[LoaderType, NodeT], _R], -) -> Callable[[LoaderType, NodeT], _R]: - def wrapper(loader: LoaderType, node: NodeT) -> _R: - if not node.value: - raise HomeAssistantError( - f"{node.start_mark}: {node.tag} needs an argument." - ) - return func(loader, node) - - return wrapper - - -@_raise_if_no_value -def _include_yaml(loader: LoaderType, node: yaml.nodes.Node) -> JSON_TYPE: - """Load another YAML file and embed it using the !include tag. - - Example: - device_tracker: !include device_tracker.yaml - - """ - fname = os.path.join(os.path.dirname(loader.get_name), node.value) - try: - loaded_yaml = load_yaml(fname, loader.secrets) - if loaded_yaml is None: - loaded_yaml = NodeDictClass() - return _add_reference(loaded_yaml, loader, node) - except FileNotFoundError as exc: - raise HomeAssistantError( - f"{node.start_mark}: Unable to read file {fname}" - ) from exc - - -def _is_file_valid(name: str) -> bool: - """Decide if a file is valid.""" - return not name.startswith(".") - - -def _find_files(directory: str, pattern: str) -> Iterator[str]: - """Recursively load files in a directory.""" - for root, dirs, files in os.walk(directory, topdown=True): - dirs[:] = [d for d in dirs if _is_file_valid(d)] - for basename in sorted(files): - if _is_file_valid(basename) and fnmatch.fnmatch(basename, pattern): - filename = os.path.join(root, basename) - yield filename - - -@_raise_if_no_value -def _include_dir_named_yaml(loader: LoaderType, node: yaml.nodes.Node) -> NodeDictClass: - """Load multiple files from directory as a dictionary.""" - mapping = NodeDictClass() - loc = os.path.join(os.path.dirname(loader.get_name), node.value) - for fname in _find_files(loc, "*.yaml"): - filename = os.path.splitext(os.path.basename(fname))[0] - if os.path.basename(fname) == SECRET_YAML: - continue - loaded_yaml = load_yaml(fname, loader.secrets) - if loaded_yaml is None: - # Special case, an empty file included by !include_dir_named is treated - # as an empty dictionary - loaded_yaml = NodeDictClass() - mapping[filename] = loaded_yaml - return _add_reference_to_node_class(mapping, loader, node) - - -@_raise_if_no_value -def _include_dir_merge_named_yaml( - loader: LoaderType, node: yaml.nodes.Node -) -> NodeDictClass: - """Load multiple files from directory as a merged dictionary.""" - mapping = NodeDictClass() - loc = os.path.join(os.path.dirname(loader.get_name), node.value) - for fname in _find_files(loc, "*.yaml"): - if os.path.basename(fname) == SECRET_YAML: - continue - loaded_yaml = load_yaml(fname, loader.secrets) - if isinstance(loaded_yaml, dict): - mapping.update(loaded_yaml) - return _add_reference_to_node_class(mapping, loader, node) - - -@_raise_if_no_value -def _include_dir_list_yaml( - loader: LoaderType, node: yaml.nodes.Node -) -> list[JSON_TYPE]: - """Load multiple files from directory as a list.""" - loc = os.path.join(os.path.dirname(loader.get_name), node.value) - return [ - 
loaded_yaml - for f in _find_files(loc, "*.yaml") - if os.path.basename(f) != SECRET_YAML - and (loaded_yaml := load_yaml(f, loader.secrets)) is not None - ] - - -@_raise_if_no_value -def _include_dir_merge_list_yaml( - loader: LoaderType, node: yaml.nodes.Node -) -> JSON_TYPE: - """Load multiple files from directory as a merged list.""" - loc: str = os.path.join(os.path.dirname(loader.get_name), node.value) - merged_list: list[JSON_TYPE] = [] - for fname in _find_files(loc, "*.yaml"): - if os.path.basename(fname) == SECRET_YAML: - continue - loaded_yaml = load_yaml(fname, loader.secrets) - if isinstance(loaded_yaml, list): - merged_list.extend(loaded_yaml) - return _add_reference(merged_list, loader, node) - - -def _handle_mapping_tag( - loader: LoaderType, node: yaml.nodes.MappingNode -) -> NodeDictClass: - """Load YAML mappings into an ordered dictionary to preserve key order.""" - loader.flatten_mapping(node) - nodes = loader.construct_pairs(node) - - seen: dict = {} - for (key, _), (child_node, _) in zip(nodes, node.value, strict=False): - line = child_node.start_mark.line - - try: - hash(key) - except TypeError as exc: - fname = loader.get_stream_name - raise yaml.MarkedYAMLError( - context=f'invalid key: "{key}"', - context_mark=yaml.Mark( - fname, - 0, - line, - -1, - None, - None, # type: ignore[arg-type] - ), - ) from exc - - if key in seen: - fname = loader.get_stream_name - _LOGGER.warning( - 'YAML file %s contains duplicate key "%s". Check lines %d and %d', - fname, - key, - seen[key], - line, - ) - seen[key] = line - - return _add_reference_to_node_class(NodeDictClass(nodes), loader, node) - - -def _construct_seq(loader: LoaderType, node: yaml.nodes.Node) -> JSON_TYPE: - """Add line number and file name to Load YAML sequence.""" - (obj,) = loader.construct_yaml_seq(node) - return _add_reference(obj, loader, node) - - -def _handle_scalar_tag( - loader: LoaderType, node: yaml.nodes.ScalarNode -) -> str | int | float | None: - """Add line number and file name to Load YAML sequence.""" - obj = node.value - if not isinstance(obj, str): - return obj - return _add_reference_to_node_class(NodeStrClass(obj), loader, node) - - -def _env_var_yaml(loader: LoaderType, node: yaml.nodes.Node) -> str: - """Load environment variables and embed it into the configuration YAML.""" - args = node.value.split() - - # Check for a default value - if len(args) > 1: - return os.getenv(args[0], " ".join(args[1:])) - if args[0] in os.environ: - return os.environ[args[0]] - _LOGGER.error("Environment variable %s not defined", node.value) - raise HomeAssistantError(node.value) + return parse_annotated_yaml(content, secrets) + except YAMLException as exc: + raise HomeAssistantError(str(exc)) from exc def secret_yaml(loader: LoaderType, node: yaml.nodes.Node) -> JSON_TYPE: """Load secrets and embed it into the configuration YAML.""" - if loader.secrets is None: - raise HomeAssistantError("Secrets not supported in this YAML file") - - return loader.secrets.get(loader.get_name, node.value) - - -def add_constructor(tag: Any, constructor: Any) -> None: - """Add to constructor to all loaders.""" - for yaml_loader in (FastSafeLoader, PythonSafeLoader): - yaml_loader.add_constructor(tag, constructor) - - -add_constructor("!include", _include_yaml) -add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, _handle_mapping_tag) -add_constructor(yaml.resolver.BaseResolver.DEFAULT_SCALAR_TAG, _handle_scalar_tag) -add_constructor(yaml.resolver.BaseResolver.DEFAULT_SEQUENCE_TAG, _construct_seq) 
-add_constructor("!env_var", _env_var_yaml) -add_constructor("!secret", secret_yaml) -add_constructor("!include_dir_list", _include_dir_list_yaml) -add_constructor("!include_dir_merge_list", _include_dir_merge_list_yaml) -add_constructor("!include_dir_named", _include_dir_named_yaml) -add_constructor("!include_dir_merge_named", _include_dir_merge_named_yaml) -add_constructor("!input", Input.from_node) + try: + return annotated_secret_yaml(loader, node) + except YAMLException as exc: + raise HomeAssistantError(str(exc)) from exc diff --git a/homeassistant/util/yaml/objects.py b/homeassistant/util/yaml/objects.py index 7e4019331c6..26714b0fdd4 100644 --- a/homeassistant/util/yaml/objects.py +++ b/homeassistant/util/yaml/objects.py @@ -2,52 +2,6 @@ from __future__ import annotations -from dataclasses import dataclass -from typing import Any +from annotatedyaml import Input, NodeDictClass, NodeListClass, NodeStrClass -import voluptuous as vol -from voluptuous.schema_builder import _compile_scalar -import yaml - - -class NodeListClass(list): - """Wrapper class to be able to add attributes on a list.""" - - __slots__ = ("__config_file__", "__line__") - - __config_file__: str - __line__: int | str - - -class NodeStrClass(str): - """Wrapper class to be able to add attributes on a string.""" - - __slots__ = ("__config_file__", "__line__") - - __config_file__: str - __line__: int | str - - def __voluptuous_compile__(self, schema: vol.Schema) -> Any: - """Needed because vol.Schema.compile does not handle str subclasses.""" - return _compile_scalar(self) # type: ignore[no-untyped-call] - - -class NodeDictClass(dict): - """Wrapper class to be able to add attributes on a dict.""" - - __slots__ = ("__config_file__", "__line__") - - __config_file__: str - __line__: int | str - - -@dataclass(slots=True, frozen=True) -class Input: - """Input that should be substituted.""" - - name: str - - @classmethod - def from_node(cls, loader: yaml.Loader, node: yaml.nodes.Node) -> Input: - """Create a new placeholder from a node.""" - return cls(node.value) +__all__ = ["Input", "NodeDictClass", "NodeListClass", "NodeStrClass"] diff --git a/mypy.ini b/mypy.ini index 520fad7d738..9831a183ec4 100644 --- a/mypy.ini +++ b/mypy.ini @@ -945,6 +945,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.bosch_alarm.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.braviatv.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3876,6 +3886,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.remote_calendar.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.renault.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/pyproject.toml b/pyproject.toml index 09c14cbde69..0a56de0f6f7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,97 +1,98 @@ [build-system] -requires = ["setuptools==75.1.0"] +requires = ["setuptools==77.0.3"] build-backend = "setuptools.build_meta" [project] -name = "homeassistant" -version = "2025.4.0.dev0" -license = 
{text = "Apache-2.0"} +name = "homeassistant" +version = "2025.5.0.dev0" +license = "Apache-2.0" +license-files = ["LICENSE*", "homeassistant/backports/LICENSE*"] description = "Open-source home automation platform running on Python 3." -readme = "README.rst" -authors = [ - {name = "The Home Assistant Authors", email = "hello@home-assistant.io"} +readme = "README.rst" +authors = [ + { name = "The Home Assistant Authors", email = "hello@home-assistant.io" }, ] -keywords = ["home", "automation"] +keywords = ["home", "automation"] classifiers = [ - "Development Status :: 5 - Production/Stable", - "Intended Audience :: End Users/Desktop", - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Operating System :: OS Independent", - "Programming Language :: Python :: 3.13", - "Topic :: Home Automation", + "Development Status :: 5 - Production/Stable", + "Intended Audience :: End Users/Desktop", + "Intended Audience :: Developers", + "Operating System :: OS Independent", + "Programming Language :: Python :: 3.13", + "Topic :: Home Automation", ] requires-python = ">=3.13.0" -dependencies = [ - "aiodns==3.2.0", - # Integrations may depend on hassio integration without listing it to - # change behavior based on presence of supervisor. Deprecated with #127228 - # Lib can be removed with 2025.11 - "aiohasupervisor==0.3.0", - "aiohttp==3.11.13", - "aiohttp_cors==0.7.0", - "aiohttp-fast-zlib==0.2.3", - "aiohttp-asyncmdnsresolver==0.1.1", - "aiozoneinfo==0.2.3", - "astral==2.2", - "async-interrupt==1.2.2", - "attrs==25.1.0", - "atomicwrites-homeassistant==1.4.1", - "audioop-lts==0.2.1", - "awesomeversion==24.6.0", - "bcrypt==4.2.0", - "certifi>=2021.5.30", - "ciso8601==2.3.2", - "cronsim==2.6", - "fnv-hash-fast==1.4.0", - # hass-nabucasa is imported by helpers which don't depend on the cloud - # integration - "hass-nabucasa==0.94.0", - # When bumping httpx, please check the version pins of - # httpcore, anyio, and h11 in gen_requirements_all - "httpx==0.28.1", - "home-assistant-bluetooth==1.13.1", - "ifaddr==0.2.0", - "Jinja2==3.1.6", - "lru-dict==1.3.0", - "PyJWT==2.10.1", - # PyJWT has loose dependency. We want the latest one. - "cryptography==44.0.1", - "Pillow==11.1.0", - "propcache==0.3.0", - "pyOpenSSL==25.0.0", - "orjson==3.10.15", - "packaging>=23.1", - "psutil-home-assistant==0.0.1", - "python-slugify==8.0.4", - "PyYAML==6.0.2", - "requests==2.32.3", - "securetar==2025.2.1", - "SQLAlchemy==2.0.38", - "standard-aifc==3.13.0", - "standard-telnetlib==3.13.0", - "typing-extensions>=4.12.2,<5.0", - "ulid-transform==1.4.0", - # Constrain urllib3 to ensure we deal with CVE-2020-26137 and CVE-2021-33503 - # Temporary setting an upper bound, to prevent compat issues with urllib3>=2 - # https://github.com/home-assistant/core/issues/97248 - "urllib3>=1.26.5,<2", - "uv==0.6.1", - "voluptuous==0.15.2", - "voluptuous-serialize==2.6.0", - "voluptuous-openapi==0.0.6", - "yarl==1.18.3", - "webrtc-models==0.3.0", - "zeroconf==0.146.0" +dependencies = [ + "aiodns==3.2.0", + # Integrations may depend on hassio integration without listing it to + # change behavior based on presence of supervisor. 
Deprecated with #127228 + # Lib can be removed with 2025.11 + "aiohasupervisor==0.3.0", + "aiohttp==3.11.14", + "aiohttp_cors==0.7.0", + "aiohttp-fast-zlib==0.2.3", + "aiohttp-asyncmdnsresolver==0.1.1", + "aiozoneinfo==0.2.3", + "annotatedyaml==0.4.5", + "astral==2.2", + "async-interrupt==1.2.2", + "attrs==25.1.0", + "atomicwrites-homeassistant==1.4.1", + "audioop-lts==0.2.1", + "awesomeversion==24.6.0", + "bcrypt==4.2.0", + "certifi>=2021.5.30", + "ciso8601==2.3.2", + "cronsim==2.6", + "fnv-hash-fast==1.4.0", + # hass-nabucasa is imported by helpers which don't depend on the cloud + # integration + "hass-nabucasa==0.94.0", + # When bumping httpx, please check the version pins of + # httpcore, anyio, and h11 in gen_requirements_all + "httpx==0.28.1", + "home-assistant-bluetooth==1.13.1", + "ifaddr==0.2.0", + "Jinja2==3.1.6", + "lru-dict==1.3.0", + "PyJWT==2.10.1", + # PyJWT has loose dependency. We want the latest one. + "cryptography==44.0.1", + "Pillow==11.1.0", + "propcache==0.3.0", + "pyOpenSSL==25.0.0", + "orjson==3.10.16", + "packaging>=23.1", + "psutil-home-assistant==0.0.1", + "python-slugify==8.0.4", + "PyYAML==6.0.2", + "requests==2.32.3", + "securetar==2025.2.1", + "SQLAlchemy==2.0.39", + "standard-aifc==3.13.0", + "standard-telnetlib==3.13.0", + "typing-extensions>=4.13.0,<5.0", + "ulid-transform==1.4.0", + # Constrain urllib3 to ensure we deal with CVE-2020-26137 and CVE-2021-33503 + # Temporary setting an upper bound, to prevent compat issues with urllib3>=2 + # https://github.com/home-assistant/core/issues/97248 + "urllib3>=1.26.5,<2", + "uv==0.6.10", + "voluptuous==0.15.2", + "voluptuous-serialize==2.6.0", + "voluptuous-openapi==0.0.6", + "yarl==1.18.3", + "webrtc-models==0.3.0", + "zeroconf==0.146.0", ] [project.urls] -"Homepage" = "https://www.home-assistant.io/" +"Homepage" = "https://www.home-assistant.io/" "Source Code" = "https://github.com/home-assistant/core" "Bug Reports" = "https://github.com/home-assistant/core/issues" -"Docs: Dev" = "https://developers.home-assistant.io/" -"Discord" = "https://www.home-assistant.io/join-chat/" -"Forum" = "https://community.home-assistant.io/" +"Docs: Dev" = "https://developers.home-assistant.io/" +"Discord" = "https://www.home-assistant.io/join-chat/" +"Forum" = "https://community.home-assistant.io/" [project.scripts] hass = "homeassistant.__main__:main" @@ -118,30 +119,28 @@ init-hook = """\ ) \ """ load-plugins = [ - "pylint.extensions.code_style", - "pylint.extensions.typing", - "hass_decorator", - "hass_enforce_class_module", - "hass_enforce_sorted_platforms", - "hass_enforce_super_call", - "hass_enforce_type_hints", - "hass_inheritance", - "hass_imports", - "hass_logger", - "pylint_per_file_ignores", + "pylint.extensions.code_style", + "pylint.extensions.typing", + "hass_decorator", + "hass_enforce_class_module", + "hass_enforce_sorted_platforms", + "hass_enforce_super_call", + "hass_enforce_type_hints", + "hass_inheritance", + "hass_imports", + "hass_logger", + "pylint_per_file_ignores", ] persistent = false extension-pkg-allow-list = [ - "av.audio.stream", - "av.logging", - "av.stream", - "ciso8601", - "orjson", - "cv2", -] -fail-on = [ - "I", + "av.audio.stream", + "av.logging", + "av.stream", + "ciso8601", + "orjson", + "cv2", ] +fail-on = ["I"] [tool.pylint.BASIC] class-const-naming-style = "any" @@ -166,257 +165,257 @@ class-const-naming-style = "any" # consider-using-namedtuple-or-dataclass - too opinionated # consider-using-assignment-expr - decision to use := better left to devs disable = [ - "format", - 
"abstract-method", - "cyclic-import", - "duplicate-code", - "inconsistent-return-statements", - "locally-disabled", - "not-context-manager", - "too-few-public-methods", - "too-many-ancestors", - "too-many-arguments", - "too-many-instance-attributes", - "too-many-lines", - "too-many-locals", - "too-many-public-methods", - "too-many-boolean-expressions", - "too-many-positional-arguments", - "wrong-import-order", - "consider-using-namedtuple-or-dataclass", - "consider-using-assignment-expr", - "possibly-used-before-assignment", + "format", + "abstract-method", + "cyclic-import", + "duplicate-code", + "inconsistent-return-statements", + "locally-disabled", + "not-context-manager", + "too-few-public-methods", + "too-many-ancestors", + "too-many-arguments", + "too-many-instance-attributes", + "too-many-lines", + "too-many-locals", + "too-many-public-methods", + "too-many-boolean-expressions", + "too-many-positional-arguments", + "wrong-import-order", + "consider-using-namedtuple-or-dataclass", + "consider-using-assignment-expr", + "possibly-used-before-assignment", - # Handled by ruff - # Ref: - "await-outside-async", # PLE1142 - "bad-str-strip-call", # PLE1310 - "bad-string-format-type", # PLE1307 - "bidirectional-unicode", # PLE2502 - "continue-in-finally", # PLE0116 - "duplicate-bases", # PLE0241 - "misplaced-bare-raise", # PLE0704 - "format-needs-mapping", # F502 - "function-redefined", # F811 - # Needed because ruff does not understand type of __all__ generated by a function - # "invalid-all-format", # PLE0605 - "invalid-all-object", # PLE0604 - "invalid-character-backspace", # PLE2510 - "invalid-character-esc", # PLE2513 - "invalid-character-nul", # PLE2514 - "invalid-character-sub", # PLE2512 - "invalid-character-zero-width-space", # PLE2515 - "logging-too-few-args", # PLE1206 - "logging-too-many-args", # PLE1205 - "missing-format-string-key", # F524 - "mixed-format-string", # F506 - "no-method-argument", # N805 - "no-self-argument", # N805 - "nonexistent-operator", # B002 - "nonlocal-without-binding", # PLE0117 - "not-in-loop", # F701, F702 - "notimplemented-raised", # F901 - "return-in-init", # PLE0101 - "return-outside-function", # F706 - "syntax-error", # E999 - "too-few-format-args", # F524 - "too-many-format-args", # F522 - "too-many-star-expressions", # F622 - "truncated-format-string", # F501 - "undefined-all-variable", # F822 - "undefined-variable", # F821 - "used-prior-global-declaration", # PLE0118 - "yield-inside-async-function", # PLE1700 - "yield-outside-function", # F704 - "anomalous-backslash-in-string", # W605 - "assert-on-string-literal", # PLW0129 - "assert-on-tuple", # F631 - "bad-format-string", # W1302, F - "bad-format-string-key", # W1300, F - "bare-except", # E722 - "binary-op-exception", # PLW0711 - "cell-var-from-loop", # B023 - # "dangerous-default-value", # B006, ruff catches new occurrences, needs more work - "duplicate-except", # B014 - "duplicate-key", # F601 - "duplicate-string-formatting-argument", # F - "duplicate-value", # F - "eval-used", # S307 - "exec-used", # S102 - "expression-not-assigned", # B018 - "f-string-without-interpolation", # F541 - "forgotten-debug-statement", # T100 - "format-string-without-interpolation", # F - # "global-statement", # PLW0603, ruff catches new occurrences, needs more work - "global-variable-not-assigned", # PLW0602 - "implicit-str-concat", # ISC001 - "import-self", # PLW0406 - "inconsistent-quotes", # Q000 - "invalid-envvar-default", # PLW1508 - "keyword-arg-before-vararg", # B026 - "logging-format-interpolation", # G - 
"logging-fstring-interpolation", # G - "logging-not-lazy", # G - "misplaced-future", # F404 - "named-expr-without-context", # PLW0131 - "nested-min-max", # PLW3301 - "pointless-statement", # B018 - "raise-missing-from", # B904 - "redefined-builtin", # A001 - "try-except-raise", # TRY302 - "unused-argument", # ARG001, we don't use it - "unused-format-string-argument", #F507 - "unused-format-string-key", # F504 - "unused-import", # F401 - "unused-variable", # F841 - "useless-else-on-loop", # PLW0120 - "wildcard-import", # F403 - "bad-classmethod-argument", # N804 - "consider-iterating-dictionary", # SIM118 - "empty-docstring", # D419 - "invalid-name", # N815 - "line-too-long", # E501, disabled globally - "missing-class-docstring", # D101 - "missing-final-newline", # W292 - "missing-function-docstring", # D103 - "missing-module-docstring", # D100 - "multiple-imports", #E401 - "singleton-comparison", # E711, E712 - "subprocess-run-check", # PLW1510 - "superfluous-parens", # UP034 - "ungrouped-imports", # I001 - "unidiomatic-typecheck", # E721 - "unnecessary-direct-lambda-call", # PLC3002 - "unnecessary-lambda-assignment", # PLC3001 - "unnecessary-pass", # PIE790 - "unneeded-not", # SIM208 - "useless-import-alias", # PLC0414 - "wrong-import-order", # I001 - "wrong-import-position", # E402 - "comparison-of-constants", # PLR0133 - "comparison-with-itself", # PLR0124 - "consider-alternative-union-syntax", # UP007 - "consider-merging-isinstance", # PLR1701 - "consider-using-alias", # UP006 - "consider-using-dict-comprehension", # C402 - "consider-using-generator", # C417 - "consider-using-get", # SIM401 - "consider-using-set-comprehension", # C401 - "consider-using-sys-exit", # PLR1722 - "consider-using-ternary", # SIM108 - "literal-comparison", # F632 - "property-with-parameters", # PLR0206 - "super-with-arguments", # UP008 - "too-many-branches", # PLR0912 - "too-many-return-statements", # PLR0911 - "too-many-statements", # PLR0915 - "trailing-comma-tuple", # COM818 - "unnecessary-comprehension", # C416 - "use-a-generator", # C417 - "use-dict-literal", # C406 - "use-list-literal", # C405 - "useless-object-inheritance", # UP004 - "useless-return", # PLR1711 - "no-else-break", # RET508 - "no-else-continue", # RET507 - "no-else-raise", # RET506 - "no-else-return", # RET505 - "broad-except", # BLE001 - "protected-access", # SLF001 - "broad-exception-raised", # TRY002 - "consider-using-f-string", # PLC0209 - # "no-self-use", # PLR6301 # Optional plugin, not enabled + # Handled by ruff + # Ref: + "await-outside-async", # PLE1142 + "bad-str-strip-call", # PLE1310 + "bad-string-format-type", # PLE1307 + "bidirectional-unicode", # PLE2502 + "continue-in-finally", # PLE0116 + "duplicate-bases", # PLE0241 + "misplaced-bare-raise", # PLE0704 + "format-needs-mapping", # F502 + "function-redefined", # F811 + # Needed because ruff does not understand type of __all__ generated by a function + # "invalid-all-format", # PLE0605 + "invalid-all-object", # PLE0604 + "invalid-character-backspace", # PLE2510 + "invalid-character-esc", # PLE2513 + "invalid-character-nul", # PLE2514 + "invalid-character-sub", # PLE2512 + "invalid-character-zero-width-space", # PLE2515 + "logging-too-few-args", # PLE1206 + "logging-too-many-args", # PLE1205 + "missing-format-string-key", # F524 + "mixed-format-string", # F506 + "no-method-argument", # N805 + "no-self-argument", # N805 + "nonexistent-operator", # B002 + "nonlocal-without-binding", # PLE0117 + "not-in-loop", # F701, F702 + "notimplemented-raised", # F901 + "return-in-init", 
# PLE0101 + "return-outside-function", # F706 + "syntax-error", # E999 + "too-few-format-args", # F524 + "too-many-format-args", # F522 + "too-many-star-expressions", # F622 + "truncated-format-string", # F501 + "undefined-all-variable", # F822 + "undefined-variable", # F821 + "used-prior-global-declaration", # PLE0118 + "yield-inside-async-function", # PLE1700 + "yield-outside-function", # F704 + "anomalous-backslash-in-string", # W605 + "assert-on-string-literal", # PLW0129 + "assert-on-tuple", # F631 + "bad-format-string", # W1302, F + "bad-format-string-key", # W1300, F + "bare-except", # E722 + "binary-op-exception", # PLW0711 + "cell-var-from-loop", # B023 + # "dangerous-default-value", # B006, ruff catches new occurrences, needs more work + "duplicate-except", # B014 + "duplicate-key", # F601 + "duplicate-string-formatting-argument", # F + "duplicate-value", # F + "eval-used", # S307 + "exec-used", # S102 + "expression-not-assigned", # B018 + "f-string-without-interpolation", # F541 + "forgotten-debug-statement", # T100 + "format-string-without-interpolation", # F + # "global-statement", # PLW0603, ruff catches new occurrences, needs more work + "global-variable-not-assigned", # PLW0602 + "implicit-str-concat", # ISC001 + "import-self", # PLW0406 + "inconsistent-quotes", # Q000 + "invalid-envvar-default", # PLW1508 + "keyword-arg-before-vararg", # B026 + "logging-format-interpolation", # G + "logging-fstring-interpolation", # G + "logging-not-lazy", # G + "misplaced-future", # F404 + "named-expr-without-context", # PLW0131 + "nested-min-max", # PLW3301 + "pointless-statement", # B018 + "raise-missing-from", # B904 + "redefined-builtin", # A001 + "try-except-raise", # TRY302 + "unused-argument", # ARG001, we don't use it + "unused-format-string-argument", #F507 + "unused-format-string-key", # F504 + "unused-import", # F401 + "unused-variable", # F841 + "useless-else-on-loop", # PLW0120 + "wildcard-import", # F403 + "bad-classmethod-argument", # N804 + "consider-iterating-dictionary", # SIM118 + "empty-docstring", # D419 + "invalid-name", # N815 + "line-too-long", # E501, disabled globally + "missing-class-docstring", # D101 + "missing-final-newline", # W292 + "missing-function-docstring", # D103 + "missing-module-docstring", # D100 + "multiple-imports", #E401 + "singleton-comparison", # E711, E712 + "subprocess-run-check", # PLW1510 + "superfluous-parens", # UP034 + "ungrouped-imports", # I001 + "unidiomatic-typecheck", # E721 + "unnecessary-direct-lambda-call", # PLC3002 + "unnecessary-lambda-assignment", # PLC3001 + "unnecessary-pass", # PIE790 + "unneeded-not", # SIM208 + "useless-import-alias", # PLC0414 + "wrong-import-order", # I001 + "wrong-import-position", # E402 + "comparison-of-constants", # PLR0133 + "comparison-with-itself", # PLR0124 + "consider-alternative-union-syntax", # UP007 + "consider-merging-isinstance", # PLR1701 + "consider-using-alias", # UP006 + "consider-using-dict-comprehension", # C402 + "consider-using-generator", # C417 + "consider-using-get", # SIM401 + "consider-using-set-comprehension", # C401 + "consider-using-sys-exit", # PLR1722 + "consider-using-ternary", # SIM108 + "literal-comparison", # F632 + "property-with-parameters", # PLR0206 + "super-with-arguments", # UP008 + "too-many-branches", # PLR0912 + "too-many-return-statements", # PLR0911 + "too-many-statements", # PLR0915 + "trailing-comma-tuple", # COM818 + "unnecessary-comprehension", # C416 + "use-a-generator", # C417 + "use-dict-literal", # C406 + "use-list-literal", # C405 + 
"useless-object-inheritance", # UP004 + "useless-return", # PLR1711 + "no-else-break", # RET508 + "no-else-continue", # RET507 + "no-else-raise", # RET506 + "no-else-return", # RET505 + "broad-except", # BLE001 + "protected-access", # SLF001 + "broad-exception-raised", # TRY002 + "consider-using-f-string", # PLC0209 + # "no-self-use", # PLR6301 # Optional plugin, not enabled - # Handled by mypy - # Ref: - "abstract-class-instantiated", - "arguments-differ", - "assigning-non-slot", - "assignment-from-no-return", - "assignment-from-none", - "bad-exception-cause", - "bad-format-character", - "bad-reversed-sequence", - "bad-super-call", - "bad-thread-instantiation", - "catching-non-exception", - "comparison-with-callable", - "deprecated-class", - "dict-iter-missing-items", - "format-combined-specification", - "global-variable-undefined", - "import-error", - "inconsistent-mro", - "inherit-non-class", - "init-is-generator", - "invalid-class-object", - "invalid-enum-extension", - "invalid-envvar-value", - "invalid-format-returned", - "invalid-hash-returned", - "invalid-metaclass", - "invalid-overridden-method", - "invalid-repr-returned", - "invalid-sequence-index", - "invalid-slice-index", - "invalid-slots-object", - "invalid-slots", - "invalid-star-assignment-target", - "invalid-str-returned", - "invalid-unary-operand-type", - "invalid-unicode-codec", - "isinstance-second-argument-not-valid-type", - "method-hidden", - "misplaced-format-function", - "missing-format-argument-key", - "missing-format-attribute", - "missing-kwoa", - "no-member", - "no-value-for-parameter", - "non-iterator-returned", - "non-str-assignment-to-dunder-name", - "nonlocal-and-global", - "not-a-mapping", - "not-an-iterable", - "not-async-context-manager", - "not-callable", - "not-context-manager", - "overridden-final-method", - "raising-bad-type", - "raising-non-exception", - "redundant-keyword-arg", - "relative-beyond-top-level", - "self-cls-assignment", - "signature-differs", - "star-needs-assignment-target", - "subclassed-final-class", - "super-without-brackets", - "too-many-function-args", - "typevar-double-variance", - "typevar-name-mismatch", - "unbalanced-dict-unpacking", - "unbalanced-tuple-unpacking", - "unexpected-keyword-arg", - "unhashable-member", - "unpacking-non-sequence", - "unsubscriptable-object", - "unsupported-assignment-operation", - "unsupported-binary-operation", - "unsupported-delete-operation", - "unsupported-membership-test", - "used-before-assignment", - "using-final-decorator-in-unsupported-version", - "wrong-exception-operation", + # Handled by mypy + # Ref: + "abstract-class-instantiated", + "arguments-differ", + "assigning-non-slot", + "assignment-from-no-return", + "assignment-from-none", + "bad-exception-cause", + "bad-format-character", + "bad-reversed-sequence", + "bad-super-call", + "bad-thread-instantiation", + "catching-non-exception", + "comparison-with-callable", + "deprecated-class", + "dict-iter-missing-items", + "format-combined-specification", + "global-variable-undefined", + "import-error", + "inconsistent-mro", + "inherit-non-class", + "init-is-generator", + "invalid-class-object", + "invalid-enum-extension", + "invalid-envvar-value", + "invalid-format-returned", + "invalid-hash-returned", + "invalid-metaclass", + "invalid-overridden-method", + "invalid-repr-returned", + "invalid-sequence-index", + "invalid-slice-index", + "invalid-slots-object", + "invalid-slots", + "invalid-star-assignment-target", + "invalid-str-returned", + "invalid-unary-operand-type", + 
"invalid-unicode-codec", + "isinstance-second-argument-not-valid-type", + "method-hidden", + "misplaced-format-function", + "missing-format-argument-key", + "missing-format-attribute", + "missing-kwoa", + "no-member", + "no-value-for-parameter", + "non-iterator-returned", + "non-str-assignment-to-dunder-name", + "nonlocal-and-global", + "not-a-mapping", + "not-an-iterable", + "not-async-context-manager", + "not-callable", + "not-context-manager", + "overridden-final-method", + "raising-bad-type", + "raising-non-exception", + "redundant-keyword-arg", + "relative-beyond-top-level", + "self-cls-assignment", + "signature-differs", + "star-needs-assignment-target", + "subclassed-final-class", + "super-without-brackets", + "too-many-function-args", + "typevar-double-variance", + "typevar-name-mismatch", + "unbalanced-dict-unpacking", + "unbalanced-tuple-unpacking", + "unexpected-keyword-arg", + "unhashable-member", + "unpacking-non-sequence", + "unsubscriptable-object", + "unsupported-assignment-operation", + "unsupported-binary-operation", + "unsupported-delete-operation", + "unsupported-membership-test", + "used-before-assignment", + "using-final-decorator-in-unsupported-version", + "wrong-exception-operation", ] enable = [ - #"useless-suppression", # temporarily every now and then to clean them up - "use-symbolic-message-instead", + #"useless-suppression", # temporarily every now and then to clean them up + "use-symbolic-message-instead", ] per-file-ignores = [ - # redefined-outer-name: Tests reference fixtures in the test function - # use-implicit-booleaness-not-comparison: Tests need to validate that a list - # or a dict is returned - "/tests/:redefined-outer-name,use-implicit-booleaness-not-comparison", + # redefined-outer-name: Tests reference fixtures in the test function + # use-implicit-booleaness-not-comparison: Tests need to validate that a list + # or a dict is returned + "/tests/:redefined-outer-name,use-implicit-booleaness-not-comparison", ] [tool.pylint.REPORTS] @@ -424,7 +423,7 @@ score = false [tool.pylint.TYPECHECK] ignored-classes = [ - "_CountingAttr", # for attrs + "_CountingAttr", # for attrs ] mixin-class-rgx = ".*[Mm]ix[Ii]n" @@ -433,9 +432,9 @@ expected-line-ending-format = "LF" [tool.pylint.EXCEPTIONS] overgeneral-exceptions = [ - "builtins.BaseException", - "builtins.Exception", - # "homeassistant.exceptions.HomeAssistantError", # too many issues + "builtins.BaseException", + "builtins.Exception", + # "homeassistant.exceptions.HomeAssistantError", # too many issues ] [tool.pylint.TYPING] @@ -445,241 +444,236 @@ runtime-typing = false max-line-length-suggestions = 72 [tool.pytest.ini_options] -testpaths = [ - "tests", -] -norecursedirs = [ - ".git", - "testing_config", -] +testpaths = ["tests"] +norecursedirs = [".git", "testing_config"] log_format = "%(asctime)s.%(msecs)03d %(levelname)-8s %(threadName)s %(name)s:%(filename)s:%(lineno)s %(message)s" log_date_format = "%Y-%m-%d %H:%M:%S" asyncio_mode = "auto" asyncio_default_fixture_loop_scope = "function" filterwarnings = [ - "error::sqlalchemy.exc.SAWarning", + "error::sqlalchemy.exc.SAWarning", - # -- HomeAssistant - aiohttp - # Overwrite web.Application to pass a custom default argument to _make_request - "ignore:Inheritance class HomeAssistantApplication from web.Application is discouraged:DeprecationWarning", - # Hass wraps `ClientSession.close` to emit a warning if the session is closed accidentally - "ignore:Setting custom ClientSession.close attribute is 
discouraged:DeprecationWarning:homeassistant.helpers.aiohttp_client", - # Modify app state for testing - "ignore:Changing state of started or joined application is deprecated:DeprecationWarning:tests.components.http.test_ban", + # -- HomeAssistant - aiohttp + # Overwrite web.Application to pass a custom default argument to _make_request + "ignore:Inheritance class HomeAssistantApplication from web.Application is discouraged:DeprecationWarning", + # Hass wraps `ClientSession.close` to emit a warning if the session is closed accidentally + "ignore:Setting custom ClientSession.close attribute is discouraged:DeprecationWarning:homeassistant.helpers.aiohttp_client", + # Modify app state for testing + "ignore:Changing state of started or joined application is deprecated:DeprecationWarning:tests.components.http.test_ban", - # -- Tests - # Ignore custom pytest marks - "ignore:Unknown pytest.mark.disable_autouse_fixture:pytest.PytestUnknownMarkWarning:tests.components.met", - "ignore:Unknown pytest.mark.dataset:pytest.PytestUnknownMarkWarning:tests.components.screenlogic", - # https://github.com/rokam/sunweg/blob/3.1.0/sunweg/plant.py#L96 - v3.1.0 - 2024-10-02 - "ignore:The '(kwh_per_kwp|performance_rate)' property is deprecated and will return 0:DeprecationWarning:tests.components.sunweg.test_init", + # -- Tests + # Ignore custom pytest marks + "ignore:Unknown pytest.mark.disable_autouse_fixture:pytest.PytestUnknownMarkWarning:tests.components.met", + "ignore:Unknown pytest.mark.dataset:pytest.PytestUnknownMarkWarning:tests.components.screenlogic", + # https://github.com/rokam/sunweg/blob/3.1.0/sunweg/plant.py#L96 - v3.1.0 - 2024-10-02 + "ignore:The '(kwh_per_kwp|performance_rate)' property is deprecated and will return 0:DeprecationWarning:tests.components.sunweg.test_init", - # -- design choice 3rd party - # https://github.com/gwww/elkm1/blob/2.2.10/elkm1_lib/util.py#L8-L19 - "ignore:ssl.TLSVersion.TLSv1 is deprecated:DeprecationWarning:elkm1_lib.util", - # https://github.com/allenporter/ical/pull/215 - # https://github.com/allenporter/ical/blob/8.2.0/ical/util.py#L21-L23 - "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:ical.util", - # https://github.com/bachya/regenmaschine/blob/2024.03.0/regenmaschine/client.py#L52 - "ignore:ssl.TLSVersion.SSLv3 is deprecated:DeprecationWarning:regenmaschine.client", + # -- design choice 3rd party + # https://github.com/gwww/elkm1/blob/2.2.10/elkm1_lib/util.py#L8-L19 + "ignore:ssl.TLSVersion.TLSv1 is deprecated:DeprecationWarning:elkm1_lib.util", + # https://github.com/allenporter/ical/pull/215 + # https://github.com/allenporter/ical/blob/8.2.0/ical/util.py#L21-L23 + "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:ical.util", + # https://github.com/bachya/regenmaschine/blob/2024.03.0/regenmaschine/client.py#L52 + "ignore:ssl.TLSVersion.SSLv3 is deprecated:DeprecationWarning:regenmaschine.client", - # -- Setuptools DeprecationWarnings - # https://github.com/googleapis/google-cloud-python/issues/11184 - # https://github.com/zopefoundation/meta/issues/194 - # https://github.com/Azure/azure-sdk-for-python - "ignore:Deprecated call to `pkg_resources.declare_namespace\\(('azure'|'google.*'|'pywinusb'|'repoze'|'xbox'|'zope')\\)`:DeprecationWarning:pkg_resources", + # -- Setuptools DeprecationWarnings + # https://github.com/googleapis/google-cloud-python/issues/11184 + # https://github.com/zopefoundation/meta/issues/194 + # https://github.com/Azure/azure-sdk-for-python + 
"ignore:Deprecated call to `pkg_resources.declare_namespace\\(('azure'|'google.*'|'pywinusb'|'repoze'|'xbox'|'zope')\\)`:DeprecationWarning:pkg_resources", - # -- tracked upstream / open PRs - # - pyOpenSSL v24.2.1 - # https://github.com/certbot/certbot/issues/9828 - v2.11.0 - # https://github.com/certbot/certbot/issues/9992 - "ignore:X509Extension support in pyOpenSSL is deprecated. You should use the APIs in cryptography:DeprecationWarning:acme.crypto_util", - "ignore:CSR support in pyOpenSSL is deprecated. You should use the APIs in cryptography:DeprecationWarning:acme.crypto_util", - "ignore:CSR support in pyOpenSSL is deprecated. You should use the APIs in cryptography:DeprecationWarning:josepy.util", - # - other - # https://github.com/foxel/python_ndms2_client/issues/6 - v0.1.3 - # https://github.com/foxel/python_ndms2_client/pull/8 - "ignore:'telnetlib' is deprecated and slated for removal in Python 3.13:DeprecationWarning:ndms2_client.connection", + # -- tracked upstream / open PRs + # - pyOpenSSL v24.2.1 + # https://github.com/certbot/certbot/issues/9828 - v2.11.0 + # https://github.com/certbot/certbot/issues/9992 + "ignore:X509Extension support in pyOpenSSL is deprecated. You should use the APIs in cryptography:DeprecationWarning:acme.crypto_util", + "ignore:CSR support in pyOpenSSL is deprecated. You should use the APIs in cryptography:DeprecationWarning:acme.crypto_util", + "ignore:CSR support in pyOpenSSL is deprecated. You should use the APIs in cryptography:DeprecationWarning:josepy.util", + # - other + # https://github.com/foxel/python_ndms2_client/issues/6 - v0.1.3 + # https://github.com/foxel/python_ndms2_client/pull/8 + "ignore:'telnetlib' is deprecated and slated for removal in Python 3.13:DeprecationWarning:ndms2_client.connection", - # -- fixed, waiting for release / update - # https://github.com/bachya/aiopurpleair/pull/200 - >=2023.10.0 - "ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:aiopurpleair.helpers.validators", - # https://bugs.launchpad.net/beautifulsoup/+bug/2076897 - >4.12.3 - "ignore:The 'strip_cdata' option of HTMLParser\\(\\) has never done anything and will eventually be removed:DeprecationWarning:bs4.builder._lxml", - # https://github.com/DataDog/datadogpy/pull/290 - >=0.23.0 - "ignore:invalid escape sequence:SyntaxWarning:.*datadog.dogstatsd.base", - # https://github.com/DataDog/datadogpy/pull/566/files - >=0.37.0 - "ignore:pkg_resources is deprecated as an API:DeprecationWarning:datadog.util.compat", - # https://github.com/fwestenberg/devialet/pull/6 - >1.4.5 - "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:devialet.devialet_api", - # https://github.com/httplib2/httplib2/pull/226 - >=0.21.0 - "ignore:ssl.PROTOCOL_TLS is deprecated:DeprecationWarning:httplib2", - # https://github.com/influxdata/influxdb-client-python/issues/603 >=1.45.0 - # https://github.com/influxdata/influxdb-client-python/pull/652 - "ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:influxdb_client.client.write.point", - # https://github.com/majuss/lupupy/pull/15 - >0.3.2 - "ignore:\"is not\" with 'str' literal. 
Did you mean \"!=\"?:SyntaxWarning:.*lupupy.devices.alarm", - # https://github.com/nextcord/nextcord/pull/1095 - >2.6.1 - "ignore:pkg_resources is deprecated as an API:DeprecationWarning:nextcord.health_check", - # https://github.com/eclipse/paho.mqtt.python/issues/653 - >=2.0.0 - # https://github.com/eclipse/paho.mqtt.python/pull/665 - "ignore:ssl.PROTOCOL_TLS is deprecated:DeprecationWarning:paho.mqtt.client", - # https://github.com/vacanza/python-holidays/discussions/1800 - >1.0.0 - "ignore::DeprecationWarning:holidays", - # https://github.com/rytilahti/python-miio/pull/1809 - >=0.6.0.dev0 - "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:miio.protocol", - "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:miio.miioprotocol", - # https://github.com/okunishinishi/python-stringcase/commit/6a5c5bbd3fe5337862abc7fd0853a0f36e18b2e1 - >1.2.0 - "ignore:invalid escape sequence:SyntaxWarning:.*stringcase", + # -- fixed, waiting for release / update + # https://github.com/bachya/aiopurpleair/pull/200 - >=2023.10.0 + "ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:aiopurpleair.helpers.validators", + # https://bugs.launchpad.net/beautifulsoup/+bug/2076897 - >4.12.3 + "ignore:The 'strip_cdata' option of HTMLParser\\(\\) has never done anything and will eventually be removed:DeprecationWarning:bs4.builder._lxml", + # https://github.com/DataDog/datadogpy/pull/290 - >=0.23.0 + "ignore:invalid escape sequence:SyntaxWarning:.*datadog.dogstatsd.base", + # https://github.com/DataDog/datadogpy/pull/566/files - >=0.37.0 + "ignore:pkg_resources is deprecated as an API:DeprecationWarning:datadog.util.compat", + # https://github.com/fwestenberg/devialet/pull/6 - >1.4.5 + "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:devialet.devialet_api", + # https://github.com/httplib2/httplib2/pull/226 - >=0.21.0 + "ignore:ssl.PROTOCOL_TLS is deprecated:DeprecationWarning:httplib2", + # https://github.com/influxdata/influxdb-client-python/issues/603 >=1.45.0 + # https://github.com/influxdata/influxdb-client-python/pull/652 + "ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:influxdb_client.client.write.point", + # https://github.com/majuss/lupupy/pull/15 - >0.3.2 + "ignore:\"is not\" with 'str' literal. 
Did you mean \"!=\"?:SyntaxWarning:.*lupupy.devices.alarm", + # https://github.com/nextcord/nextcord/pull/1095 - >2.6.1 + "ignore:pkg_resources is deprecated as an API:DeprecationWarning:nextcord.health_check", + # https://github.com/eclipse/paho.mqtt.python/issues/653 - >=2.0.0 + # https://github.com/eclipse/paho.mqtt.python/pull/665 + "ignore:ssl.PROTOCOL_TLS is deprecated:DeprecationWarning:paho.mqtt.client", + # https://github.com/vacanza/python-holidays/discussions/1800 - >1.0.0 + "ignore::DeprecationWarning:holidays", + # https://github.com/rytilahti/python-miio/pull/1809 - >=0.6.0.dev0 + "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:miio.protocol", + "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:miio.miioprotocol", + # https://github.com/okunishinishi/python-stringcase/commit/6a5c5bbd3fe5337862abc7fd0853a0f36e18b2e1 - >1.2.0 + "ignore:invalid escape sequence:SyntaxWarning:.*stringcase", - # -- fixed for Python 3.13 - # https://github.com/rhasspy/wyoming/commit/e34af30d455b6f2bb9e5cfb25fad8d276914bc54 - >=1.4.2 - "ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:wyoming.audio", + # -- fixed for Python 3.13 + # https://github.com/rhasspy/wyoming/commit/e34af30d455b6f2bb9e5cfb25fad8d276914bc54 - >=1.4.2 + "ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:wyoming.audio", - # -- other - # Locale changes might take some time to resolve upstream - # https://github.com/Squachen/micloud/blob/v_0.6/micloud/micloud.py#L35 - v0.6 - 2022-12-08 - "ignore:'locale.getdefaultlocale' is deprecated and slated for removal in Python 3.15:DeprecationWarning:micloud.micloud", - # https://github.com/MatsNl/pyatag/issues/11 - v0.3.7.1 - 2023-10-09 - "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:pyatag.gateway", - # https://github.com/lidatong/dataclasses-json/issues/328 - # https://github.com/lidatong/dataclasses-json/pull/351 - "ignore:The 'default' argument to fields is deprecated. Use 'dump_default' instead:DeprecationWarning:dataclasses_json.mm", - # https://pypi.org/project/emulated-roku/ - v0.3.0 - 2023-12-19 - # https://github.com/martonperei/emulated_roku - "ignore:loop argument is deprecated:DeprecationWarning:emulated_roku", - # https://github.com/w1ll1am23/pyeconet/blob/v0.1.23/src/pyeconet/api.py#L38 - v0.1.23 - 2024-10-08 - "ignore:ssl.PROTOCOL_TLS is deprecated:DeprecationWarning:pyeconet.api", - # https://github.com/thecynic/pylutron - v0.2.16 - 2024-10-22 - "ignore:setDaemon\\(\\) is deprecated, set the daemon attribute instead:DeprecationWarning:pylutron", - # https://github.com/pschmitt/pynuki/blob/1.6.3/pynuki/utils.py#L21 - v1.6.3 - 2024-02-24 - "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:pynuki.utils", - # https://github.com/lextudio/pysnmp/blob/v7.1.10/pysnmp/smi/compiler.py#L23-L31 - v7.1.10 - 2024-11-04 - "ignore:smiV1Relaxed is deprecated. Please use smi_v1_relaxed instead:DeprecationWarning:pysnmp.smi.compiler", - "ignore:getReadersFromUrls is deprecated. 
Please use get_readers_from_urls instead:DeprecationWarning:pysmi.reader.url", # wrong stacklevel - # https://github.com/briis/pyweatherflowudp/blob/v1.4.5/pyweatherflowudp/const.py#L20 - v1.4.5 - 2023-10-10 - "ignore:This function will be removed in future versions of pint:DeprecationWarning:pyweatherflowudp.const", - # Wrong stacklevel - # https://bugs.launchpad.net/beautifulsoup/+bug/2034451 fixed in >4.12.3 - "ignore:It looks like you're parsing an XML document using an HTML parser:UserWarning:html.parser", - # New in aiohttp - v3.9.0 - "ignore:It is recommended to use web.AppKey instances for keys:UserWarning:(homeassistant|tests|aiohttp_cors)", - # - SyntaxWarnings - # https://pypi.org/project/aprslib/ - v0.7.2 - 2022-07-10 - "ignore:invalid escape sequence:SyntaxWarning:.*aprslib.parsing.common", - "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:aprslib.parsing.common", - # https://pypi.org/project/panasonic-viera/ - v0.4.2 - 2024-04-24 - # https://github.com/florianholzapfel/panasonic-viera/blob/0.4.2/panasonic_viera/__init__.py#L789 - "ignore:invalid escape sequence:SyntaxWarning:.*panasonic_viera", - # https://pypi.org/project/pyblackbird/ - v0.6 - 2023-03-15 - # https://github.com/koolsb/pyblackbird/pull/9 -> closed - "ignore:invalid escape sequence:SyntaxWarning:.*pyblackbird", - # https://pypi.org/project/pyws66i/ - v1.1 - 2022-04-05 - "ignore:invalid escape sequence:SyntaxWarning:.*pyws66i", - # https://pypi.org/project/sanix/ - v1.0.6 - 2024-05-01 - # https://github.com/tomaszsluszniak/sanix_py/blob/v1.0.6/sanix/__init__.py#L42 - "ignore:invalid escape sequence:SyntaxWarning:.*sanix", - # https://pypi.org/project/sleekxmppfs/ - v1.4.1 - 2022-08-18 - "ignore:invalid escape sequence:SyntaxWarning:.*sleekxmppfs.thirdparty.mini_dateutil", # codespell:ignore thirdparty - # - pkg_resources - # https://pypi.org/project/aiomusiccast/ - v0.14.8 - 2023-03-20 - "ignore:pkg_resources is deprecated as an API:DeprecationWarning:aiomusiccast", - # https://pypi.org/project/habitipy/ - v0.3.3 - 2024-10-28 - "ignore:pkg_resources is deprecated as an API:DeprecationWarning:habitipy.api", - # https://github.com/eavanvalkenburg/pysiaalarm/blob/v3.1.1/src/pysiaalarm/data/data.py#L7 - v3.1.1 - 2023-04-17 - "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pysiaalarm.data.data", - # https://pypi.org/project/pybotvac/ - v0.0.25 - 2024-04-11 - "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pybotvac.version", - # https://github.com/home-assistant-ecosystem/python-mystrom/blob/2.2.0/pymystrom/__init__.py#L10 - v2.2.0 - 2023-05-21 - "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pymystrom", + # -- other + # Locale changes might take some time to resolve upstream + # https://github.com/Squachen/micloud/blob/v_0.6/micloud/micloud.py#L35 - v0.6 - 2022-12-08 + "ignore:'locale.getdefaultlocale' is deprecated and slated for removal in Python 3.15:DeprecationWarning:micloud.micloud", + # https://github.com/MatsNl/pyatag/issues/11 - v0.3.7.1 - 2023-10-09 + "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:pyatag.gateway", + # https://github.com/lidatong/dataclasses-json/issues/328 + # https://github.com/lidatong/dataclasses-json/pull/351 + "ignore:The 'default' argument to fields is deprecated. 
Use 'dump_default' instead:DeprecationWarning:dataclasses_json.mm", + # https://pypi.org/project/emulated-roku/ - v0.3.0 - 2023-12-19 + # https://github.com/martonperei/emulated_roku + "ignore:loop argument is deprecated:DeprecationWarning:emulated_roku", + # https://github.com/w1ll1am23/pyeconet/blob/v0.1.23/src/pyeconet/api.py#L38 - v0.1.23 - 2024-10-08 + "ignore:ssl.PROTOCOL_TLS is deprecated:DeprecationWarning:pyeconet.api", + # https://github.com/thecynic/pylutron - v0.2.16 - 2024-10-22 + "ignore:setDaemon\\(\\) is deprecated, set the daemon attribute instead:DeprecationWarning:pylutron", + # https://github.com/pschmitt/pynuki/blob/1.6.3/pynuki/utils.py#L21 - v1.6.3 - 2024-02-24 + "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:pynuki.utils", + # https://github.com/lextudio/pysnmp/blob/v7.1.10/pysnmp/smi/compiler.py#L23-L31 - v7.1.10 - 2024-11-04 + "ignore:smiV1Relaxed is deprecated. Please use smi_v1_relaxed instead:DeprecationWarning:pysnmp.smi.compiler", + "ignore:getReadersFromUrls is deprecated. Please use get_readers_from_urls instead:DeprecationWarning:pysmi.reader.url", # wrong stacklevel + # https://github.com/briis/pyweatherflowudp/blob/v1.4.5/pyweatherflowudp/const.py#L20 - v1.4.5 - 2023-10-10 + "ignore:This function will be removed in future versions of pint:DeprecationWarning:pyweatherflowudp.const", + # Wrong stacklevel + # https://bugs.launchpad.net/beautifulsoup/+bug/2034451 fixed in >4.12.3 + "ignore:It looks like you're parsing an XML document using an HTML parser:UserWarning:html.parser", + # New in aiohttp - v3.9.0 + "ignore:It is recommended to use web.AppKey instances for keys:UserWarning:(homeassistant|tests|aiohttp_cors)", + # - SyntaxWarnings + # https://pypi.org/project/aprslib/ - v0.7.2 - 2022-07-10 + "ignore:invalid escape sequence:SyntaxWarning:.*aprslib.parsing.common", + "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:aprslib.parsing.common", + # https://pypi.org/project/panasonic-viera/ - v0.4.2 - 2024-04-24 + # https://github.com/florianholzapfel/panasonic-viera/blob/0.4.2/panasonic_viera/__init__.py#L789 + "ignore:invalid escape sequence:SyntaxWarning:.*panasonic_viera", + # https://pypi.org/project/pyblackbird/ - v0.6 - 2023-03-15 + # https://github.com/koolsb/pyblackbird/pull/9 -> closed + "ignore:invalid escape sequence:SyntaxWarning:.*pyblackbird", + # https://pypi.org/project/pyws66i/ - v1.1 - 2022-04-05 + "ignore:invalid escape sequence:SyntaxWarning:.*pyws66i", + # https://pypi.org/project/sanix/ - v1.0.6 - 2024-05-01 + # https://github.com/tomaszsluszniak/sanix_py/blob/v1.0.6/sanix/__init__.py#L42 + "ignore:invalid escape sequence:SyntaxWarning:.*sanix", + # https://pypi.org/project/sleekxmppfs/ - v1.4.1 - 2022-08-18 + "ignore:invalid escape sequence:SyntaxWarning:.*sleekxmppfs.thirdparty.mini_dateutil", # codespell:ignore thirdparty + # - pkg_resources + # https://pypi.org/project/aiomusiccast/ - v0.14.8 - 2023-03-20 + "ignore:pkg_resources is deprecated as an API:DeprecationWarning:aiomusiccast", + # https://pypi.org/project/habitipy/ - v0.3.3 - 2024-10-28 + "ignore:pkg_resources is deprecated as an API:DeprecationWarning:habitipy.api", + # https://github.com/eavanvalkenburg/pysiaalarm/blob/v3.1.1/src/pysiaalarm/data/data.py#L7 - v3.1.1 - 2023-04-17 + "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pysiaalarm.data.data", + # https://pypi.org/project/pybotvac/ - v0.0.25 - 2024-04-11 + "ignore:pkg_resources is deprecated as an 
API:DeprecationWarning:pybotvac.version", + # https://github.com/home-assistant-ecosystem/python-mystrom/blob/2.2.0/pymystrom/__init__.py#L10 - v2.2.0 - 2023-05-21 + "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pymystrom", - # -- Python 3.13 - # HomeAssistant - "ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:homeassistant.components.assist_pipeline.websocket_api", - "ignore:'telnetlib' is deprecated and slated for removal in Python 3.13:DeprecationWarning:homeassistant.components.hddtemp.sensor", - # https://pypi.org/project/nextcord/ - v2.6.0 - 2023-09-23 - # https://github.com/nextcord/nextcord/issues/1174 - # https://github.com/nextcord/nextcord/blob/v2.6.1/nextcord/player.py#L5 - "ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:nextcord.player", - # https://pypi.org/project/SpeechRecognition/ - v3.11.0 - 2024-05-05 - # https://github.com/Uberi/speech_recognition/blob/3.11.0/speech_recognition/__init__.py#L7 - "ignore:'aifc' is deprecated and slated for removal in Python 3.13:DeprecationWarning:speech_recognition", - # https://pypi.org/project/voip-utils/ - v0.2.0 - 2024-09-06 - # https://github.com/home-assistant-libs/voip-utils/blob/0.2.0/voip_utils/rtp_audio.py#L3 - "ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:voip_utils.rtp_audio", + # -- Python 3.13 + # HomeAssistant + "ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:homeassistant.components.assist_pipeline.websocket_api", + "ignore:'telnetlib' is deprecated and slated for removal in Python 3.13:DeprecationWarning:homeassistant.components.hddtemp.sensor", + # https://pypi.org/project/nextcord/ - v2.6.0 - 2023-09-23 + # https://github.com/nextcord/nextcord/issues/1174 + # https://github.com/nextcord/nextcord/blob/v2.6.1/nextcord/player.py#L5 + "ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:nextcord.player", + # https://pypi.org/project/SpeechRecognition/ - v3.11.0 - 2024-05-05 + # https://github.com/Uberi/speech_recognition/blob/3.11.0/speech_recognition/__init__.py#L7 + "ignore:'aifc' is deprecated and slated for removal in Python 3.13:DeprecationWarning:speech_recognition", + # https://pypi.org/project/voip-utils/ - v0.2.0 - 2024-09-06 + # https://github.com/home-assistant-libs/voip-utils/blob/0.2.0/voip_utils/rtp_audio.py#L3 + "ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:voip_utils.rtp_audio", - # -- Python 3.13 - unmaintained projects, last release about 2+ years - # https://pypi.org/project/pydub/ - v0.25.1 - 2021-03-10 - "ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:pydub.utils", - # https://github.com/heathbar/plum-lightpad-python/issues/7 - v0.0.11 - 2018-10-16 - "ignore:'telnetlib' is deprecated and slated for removal in Python 3.13:DeprecationWarning:plumlightpad.lightpad", - # https://pypi.org/project/pyws66i/ - v1.1 - 2022-04-05 - # https://github.com/ssaenger/pyws66i/blob/v1.1/pyws66i/__init__.py#L2 - "ignore:'telnetlib' is deprecated and slated for removal in Python 3.13:DeprecationWarning:pyws66i", + # -- Python 3.13 - unmaintained projects, last release about 2+ years + # https://pypi.org/project/pydub/ - v0.25.1 - 2021-03-10 + "ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:pydub.utils", + # https://github.com/heathbar/plum-lightpad-python/issues/7 - v0.0.11 - 
2018-10-16 + "ignore:'telnetlib' is deprecated and slated for removal in Python 3.13:DeprecationWarning:plumlightpad.lightpad", + # https://pypi.org/project/pyws66i/ - v1.1 - 2022-04-05 + # https://github.com/ssaenger/pyws66i/blob/v1.1/pyws66i/__init__.py#L2 + "ignore:'telnetlib' is deprecated and slated for removal in Python 3.13:DeprecationWarning:pyws66i", - # -- New in Python 3.13 - # https://github.com/kurtmckee/feedparser/pull/389 - >6.0.11 - # https://github.com/kurtmckee/feedparser/issues/481 - "ignore:'count' is passed as positional argument:DeprecationWarning:feedparser.html", - # https://github.com/youknowone/python-deadlib - Backports for aifc, telnetlib - "ignore:aifc was removed in Python 3.13.*'standard-aifc':DeprecationWarning:speech_recognition", - "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:homeassistant.components.hddtemp.sensor", - "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:ndms2_client.connection", - "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:plumlightpad.lightpad", - "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:pyws66i", + # -- New in Python 3.13 + # https://github.com/kurtmckee/feedparser/pull/389 - >6.0.11 + # https://github.com/kurtmckee/feedparser/issues/481 + "ignore:'count' is passed as positional argument:DeprecationWarning:feedparser.html", + # https://github.com/youknowone/python-deadlib - Backports for aifc, telnetlib + "ignore:aifc was removed in Python 3.13.*'standard-aifc':DeprecationWarning:speech_recognition", + "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:homeassistant.components.hddtemp.sensor", + "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:ndms2_client.connection", + "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:plumlightpad.lightpad", + "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:pyws66i", - # -- unmaintained projects, last release about 2+ years - # https://pypi.org/project/agent-py/ - v0.0.23 - 2020-06-04 - "ignore:with timeout\\(\\) is deprecated:DeprecationWarning:agent.a", - # https://pypi.org/project/aiomodernforms/ - v0.1.8 - 2021-06-27 - "ignore:with timeout\\(\\) is deprecated:DeprecationWarning:aiomodernforms.modernforms", - # https://pypi.org/project/alarmdecoder/ - v1.13.11 - 2021-06-01 - "ignore:invalid escape sequence:SyntaxWarning:.*alarmdecoder", - # https://pypi.org/project/directv/ - v0.4.0 - 2020-09-12 - "ignore:with timeout\\(\\) is deprecated:DeprecationWarning:directv.directv", - "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:directv.models", - # https://pypi.org/project/foobot_async/ - v1.0.1 - 2024-08-16 - "ignore:with timeout\\(\\) is deprecated:DeprecationWarning:foobot_async", - # https://pypi.org/project/httpsig/ - v1.3.0 - 2018-11-28 - "ignore:pkg_resources is deprecated as an API:DeprecationWarning:httpsig", - # https://pypi.org/project/influxdb/ - v5.3.2 - 2024-04-18 (archived) - "ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:influxdb.line_protocol", - # https://pypi.org/project/lark-parser/ - v0.12.0 - 2021-08-30 -> moved to `lark` - # https://pypi.org/project/commentjson/ - v0.9.0 - 2020-10-05 - # https://github.com/vaidik/commentjson/issues/51 - # https://github.com/vaidik/commentjson/pull/52 - 
# Fixed upstream, commentjson depends on old version and seems to be unmaintained - "ignore:module '(sre_parse|sre_constants)' is deprecate:DeprecationWarning:lark.utils", - # https://pypi.org/project/lomond/ - v0.3.3 - 2018-09-21 - "ignore:ssl.PROTOCOL_TLS is deprecated:DeprecationWarning:lomond.session", - # https://pypi.org/project/oauth2client/ - v4.1.3 - 2018-09-07 (archived) - "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:oauth2client.client", - # https://pypi.org/project/opuslib/ - v3.0.1 - 2018-01-16 - "ignore:\"is not\" with 'int' literal. Did you mean \"!=\"?:SyntaxWarning:.*opuslib.api.decoder", - # https://pypi.org/project/passlib/ - v1.7.4 - 2020-10-08 - "ignore:'crypt' is deprecated and slated for removal in Python 3.13:DeprecationWarning:passlib.utils", - # https://pypi.org/project/pilight/ - v0.1.1 - 2016-10-19 - "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pilight", - # https://pypi.org/project/plumlightpad/ - v0.0.11 - 2018-10-16 - "ignore:invalid escape sequence:SyntaxWarning:.*plumlightpad.plumdiscovery", - "ignore:\"is\" with 'int' literal. Did you mean \"==\"?:SyntaxWarning:.*plumlightpad.(lightpad|logicalload)", - # https://pypi.org/project/pure-python-adb/ - v0.3.0.dev0 - 2020-08-05 - "ignore:invalid escape sequence:SyntaxWarning:.*ppadb", - # https://pypi.org/project/pydub/ - v0.25.1 - 2021-03-10 - "ignore:invalid escape sequence:SyntaxWarning:.*pydub.utils", - # https://pypi.org/project/pyiss/ - v1.0.1 - 2016-12-19 - "ignore:\"is\" with 'int' literal. Did you mean \"==\"?:SyntaxWarning:.*pyiss", - # https://pypi.org/project/PyMetEireann/ - v2021.8.0 - 2021-08-16 - "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:meteireann", - # https://pypi.org/project/PyPasser/ - v0.0.5 - 2021-10-21 - "ignore:invalid escape sequence:SyntaxWarning:.*pypasser.utils", - # https://pypi.org/project/pyqwikswitch/ - v0.94 - 2019-08-19 - "ignore:client.loop property is deprecated:DeprecationWarning:pyqwikswitch.async_", - "ignore:with timeout\\(\\) is deprecated:DeprecationWarning:pyqwikswitch.async_", - # https://pypi.org/project/Rx/ - v3.2.0 - 2021-04-25 - "ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:rx.internal.constants", - # https://pypi.org/project/rxv/ - v0.7.0 - 2021-10-10 - "ignore:defusedxml.cElementTree is deprecated, import from defusedxml.ElementTree instead:DeprecationWarning:rxv.ssdp", + # -- unmaintained projects, last release about 2+ years + # https://pypi.org/project/agent-py/ - v0.0.23 - 2020-06-04 + "ignore:with timeout\\(\\) is deprecated:DeprecationWarning:agent.a", + # https://pypi.org/project/aiomodernforms/ - v0.1.8 - 2021-06-27 + "ignore:with timeout\\(\\) is deprecated:DeprecationWarning:aiomodernforms.modernforms", + # https://pypi.org/project/alarmdecoder/ - v1.13.11 - 2021-06-01 + "ignore:invalid escape sequence:SyntaxWarning:.*alarmdecoder", + # https://pypi.org/project/directv/ - v0.4.0 - 2020-09-12 + "ignore:with timeout\\(\\) is deprecated:DeprecationWarning:directv.directv", + "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:directv.models", + # https://pypi.org/project/foobot_async/ - v1.0.1 - 2024-08-16 + "ignore:with timeout\\(\\) is deprecated:DeprecationWarning:foobot_async", + # https://pypi.org/project/httpsig/ - v1.3.0 - 2018-11-28 + "ignore:pkg_resources is deprecated as an API:DeprecationWarning:httpsig", + # https://pypi.org/project/influxdb/ 
- v5.3.2 - 2024-04-18 (archived) + "ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:influxdb.line_protocol", + # https://pypi.org/project/lark-parser/ - v0.12.0 - 2021-08-30 -> moved to `lark` + # https://pypi.org/project/commentjson/ - v0.9.0 - 2020-10-05 + # https://github.com/vaidik/commentjson/issues/51 + # https://github.com/vaidik/commentjson/pull/52 + # Fixed upstream, commentjson depends on old version and seems to be unmaintained + "ignore:module '(sre_parse|sre_constants)' is deprecate:DeprecationWarning:lark.utils", + # https://pypi.org/project/lomond/ - v0.3.3 - 2018-09-21 + "ignore:ssl.PROTOCOL_TLS is deprecated:DeprecationWarning:lomond.session", + # https://pypi.org/project/oauth2client/ - v4.1.3 - 2018-09-07 (archived) + "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:oauth2client.client", + # https://pypi.org/project/opuslib/ - v3.0.1 - 2018-01-16 + "ignore:\"is not\" with 'int' literal. Did you mean \"!=\"?:SyntaxWarning:.*opuslib.api.decoder", + # https://pypi.org/project/passlib/ - v1.7.4 - 2020-10-08 + "ignore:'crypt' is deprecated and slated for removal in Python 3.13:DeprecationWarning:passlib.utils", + # https://pypi.org/project/pilight/ - v0.1.1 - 2016-10-19 + "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pilight", + # https://pypi.org/project/plumlightpad/ - v0.0.11 - 2018-10-16 + "ignore:invalid escape sequence:SyntaxWarning:.*plumlightpad.plumdiscovery", + "ignore:\"is\" with 'int' literal. Did you mean \"==\"?:SyntaxWarning:.*plumlightpad.(lightpad|logicalload)", + # https://pypi.org/project/pure-python-adb/ - v0.3.0.dev0 - 2020-08-05 + "ignore:invalid escape sequence:SyntaxWarning:.*ppadb", + # https://pypi.org/project/pydub/ - v0.25.1 - 2021-03-10 + "ignore:invalid escape sequence:SyntaxWarning:.*pydub.utils", + # https://pypi.org/project/pyiss/ - v1.0.1 - 2016-12-19 + "ignore:\"is\" with 'int' literal. 
Did you mean \"==\"?:SyntaxWarning:.*pyiss", + # https://pypi.org/project/PyMetEireann/ - v2021.8.0 - 2021-08-16 + "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:meteireann", + # https://pypi.org/project/PyPasser/ - v0.0.5 - 2021-10-21 + "ignore:invalid escape sequence:SyntaxWarning:.*pypasser.utils", + # https://pypi.org/project/pyqwikswitch/ - v0.94 - 2019-08-19 + "ignore:client.loop property is deprecated:DeprecationWarning:pyqwikswitch.async_", + "ignore:with timeout\\(\\) is deprecated:DeprecationWarning:pyqwikswitch.async_", + # https://pypi.org/project/Rx/ - v3.2.0 - 2021-04-25 + "ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:rx.internal.constants", + # https://pypi.org/project/rxv/ - v0.7.0 - 2021-10-10 + "ignore:defusedxml.cElementTree is deprecated, import from defusedxml.ElementTree instead:DeprecationWarning:rxv.ssdp", ] [tool.coverage.run] @@ -687,174 +681,175 @@ source = ["homeassistant"] [tool.coverage.report] exclude_lines = [ - # Have to re-enable the standard pragma - "pragma: no cover", - # Don't complain about missing debug-only code: - "def __repr__", - # Don't complain if tests don't hit defensive assertion code: - "raise AssertionError", - "raise NotImplementedError", - # TYPE_CHECKING and @overload blocks are never executed during pytest run - "if TYPE_CHECKING:", - "@overload", + # Have to re-enable the standard pragma + "pragma: no cover", + # Don't complain about missing debug-only code: + "def __repr__", + # Don't complain if tests don't hit defensive assertion code: + "raise AssertionError", + "raise NotImplementedError", + # TYPE_CHECKING and @overload blocks are never executed during pytest run + "if TYPE_CHECKING:", + "@overload", ] [tool.ruff] -required-version = ">=0.9.1" +required-version = ">=0.11.0" [tool.ruff.lint] select = [ - "A001", # Variable {name} is shadowing a Python builtin - "ASYNC", # flake8-async - "B002", # Python does not support the unary prefix increment - "B005", # Using .strip() with multi-character strings is misleading - "B007", # Loop control variable {name} not used within loop body - "B014", # Exception handler with duplicate exception - "B015", # Pointless comparison. Did you mean to assign a value? Otherwise, prepend assert or remove it. - "B017", # pytest.raises(BaseException) should be considered evil - "B018", # Found useless attribute access. Either assign it to a variable or remove it. - "B023", # Function definition does not bind loop variable {name} - "B024", # `{name}` is an abstract base class, but it has no abstract methods or properties - "B026", # Star-arg unpacking after a keyword argument is strongly discouraged - "B032", # Possible unintentional type annotation (using :). Did you mean to assign (using =)? 
- "B035", # Dictionary comprehension uses static key - "B904", # Use raise from to specify exception cause - "B905", # zip() without an explicit strict= parameter - "BLE", - "C", # complexity - "COM818", # Trailing comma on bare tuple prohibited - "D", # docstrings - "DTZ003", # Use datetime.now(tz=) instead of datetime.utcnow() - "DTZ004", # Use datetime.fromtimestamp(ts, tz=) instead of datetime.utcfromtimestamp(ts) - "E", # pycodestyle - "F", # pyflakes/autoflake - "F541", # f-string without any placeholders - "FLY", # flynt - "FURB", # refurb - "G", # flake8-logging-format - "I", # isort - "INP", # flake8-no-pep420 - "ISC", # flake8-implicit-str-concat - "ICN001", # import concentions; {name} should be imported as {asname} - "LOG", # flake8-logging - "N804", # First argument of a class method should be named cls - "N805", # First argument of a method should be named self - "N815", # Variable {name} in class scope should not be mixedCase - "PERF", # Perflint - "PGH", # pygrep-hooks - "PIE", # flake8-pie - "PL", # pylint - "PT", # flake8-pytest-style - "PTH", # flake8-pathlib - "PYI", # flake8-pyi - "RET", # flake8-return - "RSE", # flake8-raise - "RUF005", # Consider iterable unpacking instead of concatenation - "RUF006", # Store a reference to the return value of asyncio.create_task - "RUF007", # Prefer itertools.pairwise() over zip() when iterating over successive pairs - "RUF008", # Do not use mutable default values for dataclass attributes - "RUF010", # Use explicit conversion flag - "RUF013", # PEP 484 prohibits implicit Optional - "RUF016", # Slice in indexed access to type {value_type} uses type {index_type} instead of an integer - "RUF017", # Avoid quadratic list summation - "RUF018", # Avoid assignment expressions in assert statements - "RUF019", # Unnecessary key check before dictionary access - "RUF020", # {never_like} | T is equivalent to T - "RUF021", # Parenthesize a and b expressions when chaining and and or together, to make the precedence clear - "RUF022", # Sort __all__ - "RUF023", # Sort __slots__ - "RUF024", # Do not pass mutable objects as values to dict.fromkeys - "RUF026", # default_factory is a positional-only argument to defaultdict - "RUF030", # print() call in assert statement is likely unintentional - "RUF032", # Decimal() called with float literal argument - "RUF033", # __post_init__ method with argument defaults - "RUF034", # Useless if-else condition - "RUF100", # Unused `noqa` directive - "RUF101", # noqa directives that use redirected rule codes - "RUF200", # Failed to parse pyproject.toml: {message} - "S102", # Use of exec detected - "S103", # bad-file-permissions - "S108", # hardcoded-temp-file - "S306", # suspicious-mktemp-usage - "S307", # suspicious-eval-usage - "S313", # suspicious-xmlc-element-tree-usage - "S314", # suspicious-xml-element-tree-usage - "S315", # suspicious-xml-expat-reader-usage - "S316", # suspicious-xml-expat-builder-usage - "S317", # suspicious-xml-sax-usage - "S318", # suspicious-xml-mini-dom-usage - "S319", # suspicious-xml-pull-dom-usage - "S320", # suspicious-xmle-tree-usage - "S601", # paramiko-call - "S602", # subprocess-popen-with-shell-equals-true - "S604", # call-with-shell-equals-true - "S608", # hardcoded-sql-expression - "S609", # unix-command-wildcard-injection - "SIM", # flake8-simplify - "SLF", # flake8-self - "SLOT", # flake8-slots - "T100", # Trace found: {name} used - "T20", # flake8-print - "TC", # flake8-type-checking - "TID", # Tidy imports - "TRY", # tryceratops - "UP", # pyupgrade - "UP031", # Use format 
specifiers instead of percent format - "UP032", # Use f-string instead of `format` call - "W", # pycodestyle + "A001", # Variable {name} is shadowing a Python builtin + "ASYNC", # flake8-async + "B002", # Python does not support the unary prefix increment + "B005", # Using .strip() with multi-character strings is misleading + "B007", # Loop control variable {name} not used within loop body + "B014", # Exception handler with duplicate exception + "B015", # Pointless comparison. Did you mean to assign a value? Otherwise, prepend assert or remove it. + "B017", # pytest.raises(BaseException) should be considered evil + "B018", # Found useless attribute access. Either assign it to a variable or remove it. + "B023", # Function definition does not bind loop variable {name} + "B024", # `{name}` is an abstract base class, but it has no abstract methods or properties + "B026", # Star-arg unpacking after a keyword argument is strongly discouraged + "B032", # Possible unintentional type annotation (using :). Did you mean to assign (using =)? + "B035", # Dictionary comprehension uses static key + "B904", # Use raise from to specify exception cause + "B905", # zip() without an explicit strict= parameter + "BLE", + "C", # complexity + "COM818", # Trailing comma on bare tuple prohibited + "D", # docstrings + "DTZ003", # Use datetime.now(tz=) instead of datetime.utcnow() + "DTZ004", # Use datetime.fromtimestamp(ts, tz=) instead of datetime.utcfromtimestamp(ts) + "E", # pycodestyle + "F", # pyflakes/autoflake + "F541", # f-string without any placeholders + "FLY", # flynt + "FURB", # refurb + "G", # flake8-logging-format + "I", # isort + "INP", # flake8-no-pep420 + "ISC", # flake8-implicit-str-concat + "ICN001", # import concentions; {name} should be imported as {asname} + "LOG", # flake8-logging + "N804", # First argument of a class method should be named cls + "N805", # First argument of a method should be named self + "N815", # Variable {name} in class scope should not be mixedCase + "PERF", # Perflint + "PGH", # pygrep-hooks + "PIE", # flake8-pie + "PL", # pylint + "PT", # flake8-pytest-style + "PTH", # flake8-pathlib + "PYI", # flake8-pyi + "RET", # flake8-return + "RSE", # flake8-raise + "RUF005", # Consider iterable unpacking instead of concatenation + "RUF006", # Store a reference to the return value of asyncio.create_task + "RUF007", # Prefer itertools.pairwise() over zip() when iterating over successive pairs + "RUF008", # Do not use mutable default values for dataclass attributes + "RUF010", # Use explicit conversion flag + "RUF013", # PEP 484 prohibits implicit Optional + "RUF016", # Slice in indexed access to type {value_type} uses type {index_type} instead of an integer + "RUF017", # Avoid quadratic list summation + "RUF018", # Avoid assignment expressions in assert statements + "RUF019", # Unnecessary key check before dictionary access + "RUF020", # {never_like} | T is equivalent to T + "RUF021", # Parenthesize a and b expressions when chaining and and or together, to make the precedence clear + "RUF022", # Sort __all__ + "RUF023", # Sort __slots__ + "RUF024", # Do not pass mutable objects as values to dict.fromkeys + "RUF026", # default_factory is a positional-only argument to defaultdict + "RUF030", # print() call in assert statement is likely unintentional + "RUF032", # Decimal() called with float literal argument + "RUF033", # __post_init__ method with argument defaults + "RUF034", # Useless if-else condition + "RUF100", # Unused `noqa` directive + "RUF101", # noqa directives that use 
redirected rule codes + "RUF200", # Failed to parse pyproject.toml: {message} + "S102", # Use of exec detected + "S103", # bad-file-permissions + "S108", # hardcoded-temp-file + "S306", # suspicious-mktemp-usage + "S307", # suspicious-eval-usage + "S313", # suspicious-xmlc-element-tree-usage + "S314", # suspicious-xml-element-tree-usage + "S315", # suspicious-xml-expat-reader-usage + "S316", # suspicious-xml-expat-builder-usage + "S317", # suspicious-xml-sax-usage + "S318", # suspicious-xml-mini-dom-usage + "S319", # suspicious-xml-pull-dom-usage + "S601", # paramiko-call + "S602", # subprocess-popen-with-shell-equals-true + "S604", # call-with-shell-equals-true + "S608", # hardcoded-sql-expression + "S609", # unix-command-wildcard-injection + "SIM", # flake8-simplify + "SLF", # flake8-self + "SLOT", # flake8-slots + "T100", # Trace found: {name} used + "T20", # flake8-print + "TC", # flake8-type-checking + "TID", # Tidy imports + "TRY", # tryceratops + "UP", # pyupgrade + "UP031", # Use format specifiers instead of percent format + "UP032", # Use f-string instead of `format` call + "W", # pycodestyle ] ignore = [ - "ASYNC109", # Async function definition with a `timeout` parameter Use `asyncio.timeout` instead - "ASYNC110", # Use `asyncio.Event` instead of awaiting `asyncio.sleep` in a `while` loop - "D202", # No blank lines allowed after function docstring - "D203", # 1 blank line required before class docstring - "D213", # Multi-line docstring summary should start at the second line - "D406", # Section name should end with a newline - "D407", # Section name underlining - "E501", # line too long + "ASYNC109", # Async function definition with a `timeout` parameter Use `asyncio.timeout` instead + "ASYNC110", # Use `asyncio.Event` instead of awaiting `asyncio.sleep` in a `while` loop + "D202", # No blank lines allowed after function docstring + "D203", # 1 blank line required before class docstring + "D213", # Multi-line docstring summary should start at the second line + "D406", # Section name should end with a newline + "D407", # Section name underlining + "E501", # line too long - "PLC1901", # {existing} can be simplified to {replacement} as an empty string is falsey; too many false positives - "PLR0911", # Too many return statements ({returns} > {max_returns}) - "PLR0912", # Too many branches ({branches} > {max_branches}) - "PLR0913", # Too many arguments to function call ({c_args} > {max_args}) - "PLR0915", # Too many statements ({statements} > {max_statements}) - "PLR2004", # Magic value used in comparison, consider replacing {value} with a constant variable - "PLW2901", # Outer {outer_kind} variable {name} overwritten by inner {inner_kind} target - "PT011", # pytest.raises({exception}) is too broad, set the `match` parameter or use a more specific exception - "PT018", # Assertion should be broken down into multiple parts - "RUF001", # String contains ambiguous unicode character. - "RUF002", # Docstring contains ambiguous unicode character. - "RUF003", # Comment contains ambiguous unicode character. - "RUF015", # Prefer next(...) 
over single element slice - "SIM102", # Use a single if statement instead of nested if statements - "SIM103", # Return the condition {condition} directly - "SIM108", # Use ternary operator {contents} instead of if-else-block - "SIM115", # Use context handler for opening files + "PLC1901", # {existing} can be simplified to {replacement} as an empty string is falsey; too many false positives + "PLR0911", # Too many return statements ({returns} > {max_returns}) + "PLR0912", # Too many branches ({branches} > {max_branches}) + "PLR0913", # Too many arguments to function call ({c_args} > {max_args}) + "PLR0915", # Too many statements ({statements} > {max_statements}) + "PLR2004", # Magic value used in comparison, consider replacing {value} with a constant variable + "PLW2901", # Outer {outer_kind} variable {name} overwritten by inner {inner_kind} target + "PT011", # pytest.raises({exception}) is too broad, set the `match` parameter or use a more specific exception + "PT018", # Assertion should be broken down into multiple parts + "RUF001", # String contains ambiguous unicode character. + "RUF002", # Docstring contains ambiguous unicode character. + "RUF003", # Comment contains ambiguous unicode character. + "RUF015", # Prefer next(...) over single element slice + "SIM102", # Use a single if statement instead of nested if statements + "SIM103", # Return the condition {condition} directly + "SIM108", # Use ternary operator {contents} instead of if-else-block + "SIM115", # Use context handler for opening files - # Moving imports into type-checking blocks can mess with pytest.patch() - "TC001", # Move application import {} into a type-checking block - "TC002", # Move third-party import {} into a type-checking block - "TC003", # Move standard library import {} into a type-checking block + # Moving imports into type-checking blocks can mess with pytest.patch() + "TC001", # Move application import {} into a type-checking block + "TC002", # Move third-party import {} into a type-checking block + "TC003", # Move standard library import {} into a type-checking block + # Quotes for typing.cast generally not necessary, only for performance critical paths + "TC006", # Add quotes to type expression in typing.cast() - "TRY003", # Avoid specifying long messages outside the exception class - "TRY400", # Use `logging.exception` instead of `logging.error` - # Ignored due to performance: https://github.com/charliermarsh/ruff/issues/2923 - "UP038", # Use `X | Y` in `isinstance` call instead of `(X, Y)` + "TRY003", # Avoid specifying long messages outside the exception class + "TRY400", # Use `logging.exception` instead of `logging.error` + # Ignored due to performance: https://github.com/charliermarsh/ruff/issues/2923 + "UP038", # Use `X | Y` in `isinstance` call instead of `(X, Y)` - # May conflict with the formatter, https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules - "W191", - "E111", - "E114", - "E117", - "D206", - "D300", - "Q", - "COM812", - "COM819", + # May conflict with the formatter, https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules + "W191", + "E111", + "E114", + "E117", + "D206", + "D300", + "Q", + "COM812", + "COM819", - # Disabled because ruff does not understand type of __all__ generated by a function - "PLE0605" + # Disabled because ruff does not understand type of __all__ generated by a function + "PLE0605", ] [tool.ruff.lint.flake8-import-conventions.extend-aliases] @@ -930,9 +925,7 @@ mark-parentheses = false [tool.ruff.lint.isort] force-sort-within-sections = true 
-known-first-party = [
-    "homeassistant",
-]
+known-first-party = ["homeassistant"]
 combine-as-imports = true
 split-on-trailing-comma = false
diff --git a/requirements.txt b/requirements.txt
index 6ae428d5420..378240607cf 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -5,11 +5,12 @@
 # Home Assistant Core
 aiodns==3.2.0
 aiohasupervisor==0.3.0
-aiohttp==3.11.13
+aiohttp==3.11.14
 aiohttp_cors==0.7.0
 aiohttp-fast-zlib==0.2.3
 aiohttp-asyncmdnsresolver==0.1.1
 aiozoneinfo==0.2.3
+annotatedyaml==0.4.5
 astral==2.2
 async-interrupt==1.2.2
 attrs==25.1.0
@@ -32,20 +33,20 @@ cryptography==44.0.1
 Pillow==11.1.0
 propcache==0.3.0
 pyOpenSSL==25.0.0
-orjson==3.10.15
+orjson==3.10.16
 packaging>=23.1
 psutil-home-assistant==0.0.1
 python-slugify==8.0.4
 PyYAML==6.0.2
 requests==2.32.3
 securetar==2025.2.1
-SQLAlchemy==2.0.38
+SQLAlchemy==2.0.39
 standard-aifc==3.13.0
 standard-telnetlib==3.13.0
-typing-extensions>=4.12.2,<5.0
+typing-extensions>=4.13.0,<5.0
 ulid-transform==1.4.0
 urllib3>=1.26.5,<2
-uv==0.6.1
+uv==0.6.10
 voluptuous==0.15.2
 voluptuous-serialize==2.6.0
 voluptuous-openapi==0.0.6
diff --git a/requirements_all.txt b/requirements_all.txt
index 2cf57251ac6..d7db5450a5f 100644
--- a/requirements_all.txt
+++ b/requirements_all.txt
@@ -45,7 +45,7 @@ ProgettiHWSW==0.1.3
 # PyBluez==0.22
 
 # homeassistant.components.cast
-PyChromecast==14.0.5
+PyChromecast==14.0.7
 
 # homeassistant.components.flick_electric
 PyFlick==1.1.3
@@ -84,7 +84,7 @@ PyQRCode==1.2.1
 PyRMVtransport==0.3.3
 
 # homeassistant.components.switchbot
-PySwitchbot==0.56.1
+PySwitchbot==0.58.0
 
 # homeassistant.components.switchmate
 PySwitchmate==0.5.1
@@ -116,7 +116,7 @@ RtmAPI==0.7.2
 
 # homeassistant.components.recorder
 # homeassistant.components.sql
-SQLAlchemy==2.0.38
+SQLAlchemy==2.0.39
 
 # homeassistant.components.tami4
 Tami4EdgeAPI==3.0
@@ -179,7 +179,7 @@ aioacaia==0.1.14
 aioairq==0.4.4
 
 # homeassistant.components.airzone_cloud
-aioairzone-cloud==0.6.10
+aioairzone-cloud==0.6.11
 
 # homeassistant.components.airzone
 aioairzone==0.9.9
@@ -201,7 +201,7 @@ aioaseko==1.0.0
 aioasuswrt==1.4.0
 
 # homeassistant.components.husqvarna_automower
-aioautomower==2025.1.1
+aioautomower==2025.3.2
 
 # homeassistant.components.azure_devops
 aioazuredevops==2.2.1
@@ -213,7 +213,7 @@ aiobafi6==0.9.0
 aiobotocore==2.13.1
 
 # homeassistant.components.comelit
-aiocomelit==0.11.2
+aiocomelit==0.11.3
 
 # homeassistant.components.dhcp
 aiodhcpwatcher==1.1.1
@@ -243,7 +243,7 @@ aioelectricitymaps==0.4.0
 aioemonitor==1.0.5
 
 # homeassistant.components.esphome
-aioesphomeapi==29.4.1
+aioesphomeapi==29.7.0
 
 # homeassistant.components.flo
 aioflo==2021.11.0
@@ -258,7 +258,7 @@ aiogithubapi==24.6.0
 aioguardian==2022.07.0
 
 # homeassistant.components.harmony
-aioharmony==0.4.1
+aioharmony==0.5.2
 
 # homeassistant.components.hassio
 aiohasupervisor==0.3.0
@@ -371,7 +371,7 @@ aioruuvigateway==0.1.0
 aiosenz==1.0.0
 
 # homeassistant.components.shelly
-aioshelly==13.2.0
+aioshelly==13.4.0
 
 # homeassistant.components.skybell
 aioskybell==22.7.0
@@ -422,7 +422,7 @@ aiowaqi==3.1.0
 aiowatttime==0.1.1
 
 # homeassistant.components.webdav
-aiowebdav2==0.4.1
+aiowebdav2==0.4.2
 
 # homeassistant.components.webostv
 aiowebostv==0.7.3
@@ -464,7 +464,7 @@ amcrest==1.9.8
 androidtv[async]==0.0.75
 
 # homeassistant.components.androidtv_remote
-androidtvremote2==0.2.0
+androidtvremote2==0.2.1
 
 # homeassistant.components.anel_pwrctrl
 anel-pwrctrl-homeassistant==0.0.1.dev2
@@ -479,7 +479,7 @@ anthemav==1.4.1
 anthropic==0.47.2
 
 # homeassistant.components.mcp_server
-anyio==4.8.0
+anyio==4.9.0
 
 # homeassistant.components.weatherkit
apple_weatherkit==1.1.3 @@ -557,7 +557,7 @@ av==13.1.0 axis==64 # homeassistant.components.fujitsu_fglair -ayla-iot-unofficial==1.4.5 +ayla-iot-unofficial==1.4.7 # homeassistant.components.azure_event_hub azure-eventhub==5.11.1 @@ -603,7 +603,7 @@ bizkaibus==0.1.1 # homeassistant.components.eq3btsmart # homeassistant.components.esphome -bleak-esphome==2.11.0 +bleak-esphome==2.12.0 # homeassistant.components.bluetooth bleak-retry-connector==3.9.0 @@ -627,24 +627,26 @@ bluecurrent-api==1.2.3 bluemaestro-ble==0.2.3 # homeassistant.components.decora -# homeassistant.components.zengge # bluepy==1.3.0 # homeassistant.components.bluetooth bluetooth-adapters==0.21.4 # homeassistant.components.bluetooth -bluetooth-auto-recovery==1.4.4 +bluetooth-auto-recovery==1.4.5 # homeassistant.components.bluetooth # homeassistant.components.ld2410_ble # homeassistant.components.led_ble # homeassistant.components.private_ble_device -bluetooth-data-tools==1.25.0 +bluetooth-data-tools==1.26.1 # homeassistant.components.bond bond-async==0.2.1 +# homeassistant.components.bosch_alarm +bosch-alarm-mode2==0.4.3 + # homeassistant.components.bosch_shc boschshcpy==0.2.91 @@ -656,7 +658,7 @@ boto3==1.34.131 botocore==1.34.131 # homeassistant.components.bring -bring-api==1.0.2 +bring-api==1.1.0 # homeassistant.components.broadlink broadlink==0.19.0 @@ -744,7 +746,7 @@ datadog==0.15.0 datapoint==0.9.9 # homeassistant.components.bluetooth -dbus-fast==2.39.3 +dbus-fast==2.43.0 # homeassistant.components.debugpy debugpy==1.8.13 @@ -756,7 +758,7 @@ debugpy==1.8.13 # decora==0.6 # homeassistant.components.ecovacs -deebot-client==12.3.1 +deebot-client==12.4.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns @@ -899,7 +901,7 @@ eufylife-ble-client==0.1.8 # evdev==1.6.1 # homeassistant.components.evohome -evohome-async==1.0.3 +evohome-async==1.0.4 # homeassistant.components.bryant_evolution evolutionhttp==0.0.18 @@ -968,16 +970,16 @@ freesms==0.2.0 fritzconnection[qr]==1.14.0 # homeassistant.components.fyta -fyta_cli==0.7.0 +fyta_cli==0.7.2 # homeassistant.components.google_translate gTTS==2.5.3 # homeassistant.components.gardena_bluetooth -gardena-bluetooth==1.5.0 +gardena-bluetooth==1.6.0 # homeassistant.components.google_assistant_sdk -gassist-text==0.0.11 +gassist-text==0.0.12 # homeassistant.components.google gcal-sync==7.0.0 @@ -1030,7 +1032,7 @@ goodwe==0.3.6 google-api-python-client==2.71.0 # homeassistant.components.google_pubsub -google-cloud-pubsub==2.28.0 +google-cloud-pubsub==2.29.0 # homeassistant.components.google_cloud google-cloud-speech==2.27.0 @@ -1039,7 +1041,7 @@ google-cloud-speech==2.27.0 google-cloud-texttospeech==2.17.2 # homeassistant.components.google_generative_ai_conversation -google-genai==1.1.0 +google-genai==1.7.0 # homeassistant.components.nest google-nest-sdm==7.1.4 @@ -1105,11 +1107,14 @@ ha-iotawattpy==0.1.2 # homeassistant.components.philips_js ha-philipsjs==3.2.2 +# homeassistant.components.homeassistant_hardware +ha-silabs-firmware-client==0.2.0 + # homeassistant.components.habitica habiticalib==0.3.7 # homeassistant.components.bluetooth -habluetooth==3.25.0 +habluetooth==3.37.0 # homeassistant.components.cloud hass-nabucasa==0.94.0 @@ -1121,7 +1126,7 @@ hass-splunk==0.1.1 hassil==2.2.3 # homeassistant.components.jewish_calendar -hdate==0.11.1 +hdate[astral]==1.0.3 # homeassistant.components.heatmiser heatmiserV3==2.0.3 @@ -1149,13 +1154,13 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.68 +holidays==0.69 # 
homeassistant.components.frontend -home-assistant-frontend==20250306.0 +home-assistant-frontend==20250326.0 # homeassistant.components.conversation -home-assistant-intents==2025.3.5 +home-assistant-intents==2025.3.24 # homeassistant.components.homematicip_cloud homematicip==1.1.7 @@ -1190,7 +1195,8 @@ ibmiotf==0.3.4 # homeassistant.components.google # homeassistant.components.local_calendar # homeassistant.components.local_todo -ical==8.3.0 +# homeassistant.components.remote_calendar +ical==9.0.1 # homeassistant.components.caldav icalendar==6.1.0 @@ -1214,7 +1220,7 @@ igloohome-api==0.1.0 ihcsdk==2.8.5 # homeassistant.components.imgw_pib -imgw_pib==1.0.9 +imgw_pib==1.0.10 # homeassistant.components.incomfort incomfort-client==0.6.7 @@ -1226,7 +1232,7 @@ influxdb-client==1.24.0 influxdb==5.3.1 # homeassistant.components.inkbird -inkbird-ble==0.7.1 +inkbird-ble==0.9.0 # homeassistant.components.insteon insteon-frontend-home-assistant==0.5.0 @@ -1281,7 +1287,7 @@ kiwiki-client==0.1.1 knocki==0.4.2 # homeassistant.components.knx -knx-frontend==2025.1.30.194235 +knx-frontend==2025.3.8.214559 # homeassistant.components.konnected konnected==1.2.0 @@ -1378,7 +1384,7 @@ mbddns==0.1.2 # homeassistant.components.mcp # homeassistant.components.mcp_server -mcp==1.1.2 +mcp==1.5.0 # homeassistant.components.minecraft_server mcstatus==11.1.1 @@ -1399,7 +1405,7 @@ messagebird==1.2.0 meteoalertapi==0.3.1 # homeassistant.components.meteo_france -meteofrance-api==1.3.0 +meteofrance-api==1.4.0 # homeassistant.components.mfi mficlient==0.5.0 @@ -1423,7 +1429,7 @@ minio==7.1.12 moat-ble==0.1.1 # homeassistant.components.moehlenhoff_alpha2 -moehlenhoff-alpha2==1.3.1 +moehlenhoff-alpha2==1.4.0 # homeassistant.components.monzo monzopy==1.4.2 @@ -1477,13 +1483,13 @@ netdata==1.3.0 netmap==0.7.0.2 # homeassistant.components.nam -nettigo-air-monitor==4.0.0 +nettigo-air-monitor==4.1.0 # homeassistant.components.neurio_energy neurio==0.3.1 # homeassistant.components.nexia -nexia==2.2.2 +nexia==2.4.0 # homeassistant.components.nextcloud nextcloudmonitor==1.5.1 @@ -1498,7 +1504,7 @@ nextdns==4.0.0 nhc==0.4.10 # homeassistant.components.nibe_heatpump -nibe==2.14.0 +nibe==2.17.0 # homeassistant.components.nice_go nice-go==1.0.1 @@ -1553,7 +1559,7 @@ odp-amsterdam==6.0.2 oemthermostat==1.1.1 # homeassistant.components.ohme -ohme==1.4.0 +ohme==1.4.1 # homeassistant.components.ollama ollama==0.4.7 @@ -1577,7 +1583,7 @@ open-garage==0.2.0 open-meteo==0.3.2 # homeassistant.components.openai_conversation -openai==1.61.0 +openai==1.68.2 # homeassistant.components.openerz openerz-api==0.3.0 @@ -1704,7 +1710,7 @@ proxmoxer==2.0.1 psutil-home-assistant==0.0.1 # homeassistant.components.systemmonitor -psutil==6.1.1 +psutil==7.0.0 # homeassistant.components.pulseaudio_loopback pulsectl==23.5.2 @@ -1730,6 +1736,9 @@ py-ccm15==0.0.9 # homeassistant.components.cpuspeed py-cpuinfo==9.0.0 +# homeassistant.components.pterodactyl +py-dactyl==2.0.4 + # homeassistant.components.dormakaba_dkey py-dormakaba-dkey==1.0.5 @@ -1779,7 +1788,7 @@ pyEmby==1.10 pyHik==0.3.2 # homeassistant.components.homee -pyHomee==1.2.7 +pyHomee==1.2.8 # homeassistant.components.rfxtrx pyRFXtrx==0.31.1 @@ -1813,7 +1822,7 @@ pyairnow==1.2.1 pyairvisual==2023.08.1 # homeassistant.components.aprilaire -pyaprilaire==0.7.7 +pyaprilaire==0.8.1 # homeassistant.components.asuswrt pyasuswrt==0.1.21 @@ -1882,7 +1891,7 @@ pycsspeechtts==1.0.8 # pycups==2.0.4 # homeassistant.components.daikin -pydaikin==2.13.8 +pydaikin==2.14.1 # homeassistant.components.danfoss_air 
pydanfossair==0.1.0 @@ -1891,7 +1900,7 @@ pydanfossair==0.1.0 pydeako==0.6.0 # homeassistant.components.deconz -pydeconz==118 +pydeconz==120 # homeassistant.components.delijn pydelijn==1.1.0 @@ -1906,7 +1915,7 @@ pydiscovergy==3.0.2 pydoods==1.0.2 # homeassistant.components.hydrawise -pydrawise==2025.2.0 +pydrawise==2025.3.0 # homeassistant.components.android_ip_webcam pydroid-ipcam==2.0.0 @@ -1996,7 +2005,7 @@ pygti==0.9.4 pyhaversion==22.8.0 # homeassistant.components.heos -pyheos==1.0.2 +pyheos==1.0.4 # homeassistant.components.hive pyhive-integration==1.0.2 @@ -2077,7 +2086,7 @@ pykwb==0.0.8 pylacrosse==0.4 # homeassistant.components.lamarzocco -pylamarzocco==1.4.7 +pylamarzocco==1.4.9 # homeassistant.components.lastfm pylast==5.1.0 @@ -2098,7 +2107,7 @@ pylitejet==0.6.3 pylitterbot==2024.0.0 # homeassistant.components.lutron_caseta -pylutron-caseta==0.23.0 +pylutron-caseta==0.24.0 # homeassistant.components.lutron pylutron==0.2.16 @@ -2199,7 +2208,7 @@ pyotgw==2.2.2 pyotp==2.8.0 # homeassistant.components.overkiz -pyoverkiz==1.16.2 +pyoverkiz==1.16.5 # homeassistant.components.onewire pyownet==0.10.0.post1 @@ -2310,7 +2319,7 @@ pysma==0.7.5 pysmappee==0.2.29 # homeassistant.components.smartthings -pysmartthings==2.7.0 +pysmartthings==3.0.0 # homeassistant.components.smarty pysmarty2==0.10.2 @@ -2322,7 +2331,7 @@ pysmhi==1.0.0 pysml==0.0.12 # homeassistant.components.smlight -pysmlight==0.2.3 +pysmlight==0.2.4 # homeassistant.components.snmp pysnmp==6.2.6 @@ -2421,7 +2430,7 @@ python-juicenet==1.1.0 python-kasa[speedups]==0.10.2 # homeassistant.components.linkplay -python-linkplay==0.1.3 +python-linkplay==0.2.1 # homeassistant.components.lirc # python-lirc==1.2.3 @@ -2452,7 +2461,7 @@ python-otbr-api==2.7.0 python-overseerr==0.7.1 # homeassistant.components.picnic -python-picnic-api2==1.2.2 +python-picnic-api2==1.2.4 # homeassistant.components.rabbitair python-rabbitair==0.0.8 @@ -2461,19 +2470,19 @@ python-rabbitair==0.0.8 python-ripple-api==0.0.3 # homeassistant.components.roborock -python-roborock==2.11.1 +python-roborock==2.16.1 # homeassistant.components.smarttub python-smarttub==0.0.39 # homeassistant.components.snoo -python-snoo==0.6.1 +python-snoo==0.6.4 # homeassistant.components.songpal python-songpal==0.16.2 # homeassistant.components.tado -python-tado==0.18.6 +python-tado==0.18.9 # homeassistant.components.technove python-technove==2.0.0 @@ -2618,7 +2627,7 @@ renault-api==0.2.9 renson-endura-delta==1.7.2 # homeassistant.components.reolink -reolink-aio==0.12.1 +reolink-aio==0.13.0 # homeassistant.components.idteck_prox rfk101py==0.0.1 @@ -2872,7 +2881,7 @@ temperusb==1.6.1 # homeassistant.components.tesla_fleet # homeassistant.components.teslemetry # homeassistant.components.tessie -tesla-fleet-api==0.9.12 +tesla-fleet-api==1.0.16 # homeassistant.components.powerwall tesla-powerwall==0.5.2 @@ -2881,7 +2890,7 @@ tesla-powerwall==0.5.2 tesla-wall-connector==1.0.2 # homeassistant.components.teslemetry -teslemetry-stream==0.6.10 +teslemetry-stream==0.6.12 # homeassistant.components.tessie tessie-api==0.1.1 @@ -2899,7 +2908,7 @@ thermopro-ble==0.11.0 thingspeak==1.0.0 # homeassistant.components.lg_thinq -thinqconnect==1.0.4 +thinqconnect==1.0.5 # homeassistant.components.tikteck tikteck==0.4 @@ -2926,7 +2935,7 @@ total-connect-client==2025.1.4 tp-connected==0.0.4 # homeassistant.components.tplink_omada -tplink-omada-client==1.4.3 +tplink-omada-client==1.4.4 # homeassistant.components.transmission transmission-rpc==7.0.3 @@ -2974,7 +2983,7 @@ unifi_ap==0.0.2 unifiled==0.11 # 
homeassistant.components.homeassistant_hardware -universal-silabs-flasher==0.0.29 +universal-silabs-flasher==0.0.30 # homeassistant.components.upb upb-lib==0.6.1 @@ -3000,7 +3009,7 @@ vallox-websocket-api==5.3.0 vehicle==2.2.2 # homeassistant.components.velbus -velbus-aio==2025.1.1 +velbus-aio==2025.3.1 # homeassistant.components.venstar venstarcolortouch==0.19 @@ -3076,7 +3085,7 @@ wirelesstagpy==0.8.1 wled==0.21.0 # homeassistant.components.wolflink -wolf-comm==0.0.19 +wolf-comm==0.0.23 # homeassistant.components.wyoming wyoming==1.5.4 @@ -3091,7 +3100,7 @@ xiaomi-ble==0.33.0 xknx==3.6.0 # homeassistant.components.knx -xknxproject==3.8.1 +xknxproject==3.8.2 # homeassistant.components.fritz # homeassistant.components.rest @@ -3122,7 +3131,7 @@ yeelight==0.7.16 yeelightsunflower==0.0.10 # homeassistant.components.yolink -yolink-api==0.4.8 +yolink-api==0.4.9 # homeassistant.components.youless youless-api==2.2.0 @@ -3131,7 +3140,7 @@ youless-api==2.2.0 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp[default]==2025.02.19 +yt-dlp[default]==2025.03.26 # homeassistant.components.zabbix zabbix-utils==2.0.2 @@ -3139,9 +3148,6 @@ zabbix-utils==2.0.2 # homeassistant.components.zamg zamg==0.3.6 -# homeassistant.components.zengge -zengge==0.2 - # homeassistant.components.zeroconf zeroconf==0.146.0 @@ -3149,7 +3155,7 @@ zeroconf==0.146.0 zeversolar==0.3.2 # homeassistant.components.zha -zha==0.0.51 +zha==0.0.54 # homeassistant.components.zhong_hong zhong-hong-hvac==1.0.13 @@ -3161,7 +3167,7 @@ ziggo-mediabox-xl==1.1.0 zm-py==0.5.4 # homeassistant.components.zwave_js -zwave-js-server-python==0.60.1 +zwave-js-server-python==0.62.0 # homeassistant.components.zwave_me zwave-me-ws==0.4.3 diff --git a/requirements_test.txt b/requirements_test.txt index f40ed46a82f..c7bb9b11b87 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -7,18 +7,18 @@ -c homeassistant/package_constraints.txt -r requirements_test_pre_commit.txt -astroid==3.3.8 +astroid==3.3.9 coverage==7.6.12 freezegun==1.5.1 license-expression==30.4.1 mock-open==1.4.0 -mypy-dev==1.16.0a5 +mypy-dev==1.16.0a7 pre-commit==4.0.0 pydantic==2.10.6 -pylint==3.3.4 +pylint==3.3.6 pylint-per-file-ignores==1.4.0 -pipdeptree==2.25.0 -pytest-asyncio==0.25.3 +pipdeptree==2.25.1 +pytest-asyncio==0.26.0 pytest-aiohttp==1.1.0 pytest-cov==6.0.0 pytest-freezer==0.4.9 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c17f56f5eb1..229c1a76559 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -42,7 +42,7 @@ PlexAPI==4.15.16 ProgettiHWSW==0.1.3 # homeassistant.components.cast -PyChromecast==14.0.5 +PyChromecast==14.0.7 # homeassistant.components.flick_electric PyFlick==1.1.3 @@ -81,7 +81,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.56.1 +PySwitchbot==0.58.0 # homeassistant.components.syncthru PySyncThru==0.8.0 @@ -110,7 +110,7 @@ RtmAPI==0.7.2 # homeassistant.components.recorder # homeassistant.components.sql -SQLAlchemy==2.0.38 +SQLAlchemy==2.0.39 # homeassistant.components.tami4 Tami4EdgeAPI==3.0 @@ -167,7 +167,7 @@ aioacaia==0.1.14 aioairq==0.4.4 # homeassistant.components.airzone_cloud -aioairzone-cloud==0.6.10 +aioairzone-cloud==0.6.11 # homeassistant.components.airzone aioairzone==0.9.9 @@ -189,7 +189,7 @@ aioaseko==1.0.0 aioasuswrt==1.4.0 # homeassistant.components.husqvarna_automower -aioautomower==2025.1.1 +aioautomower==2025.3.2 # homeassistant.components.azure_devops aioazuredevops==2.2.1 @@ -201,7 +201,7 @@ aiobafi6==0.9.0 
aiobotocore==2.13.1 # homeassistant.components.comelit -aiocomelit==0.11.2 +aiocomelit==0.11.3 # homeassistant.components.dhcp aiodhcpwatcher==1.1.1 @@ -231,7 +231,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==29.4.1 +aioesphomeapi==29.7.0 # homeassistant.components.flo aioflo==2021.11.0 @@ -243,7 +243,7 @@ aiogithubapi==24.6.0 aioguardian==2022.07.0 # homeassistant.components.harmony -aioharmony==0.4.1 +aioharmony==0.5.2 # homeassistant.components.hassio aiohasupervisor==0.3.0 @@ -353,7 +353,7 @@ aioruuvigateway==0.1.0 aiosenz==1.0.0 # homeassistant.components.shelly -aioshelly==13.2.0 +aioshelly==13.4.0 # homeassistant.components.skybell aioskybell==22.7.0 @@ -404,7 +404,7 @@ aiowaqi==3.1.0 aiowatttime==0.1.1 # homeassistant.components.webdav -aiowebdav2==0.4.1 +aiowebdav2==0.4.2 # homeassistant.components.webostv aiowebostv==0.7.3 @@ -440,7 +440,7 @@ amberelectric==2.0.12 androidtv[async]==0.0.75 # homeassistant.components.androidtv_remote -androidtvremote2==0.2.0 +androidtvremote2==0.2.1 # homeassistant.components.anova anova-wifi==0.17.0 @@ -452,7 +452,7 @@ anthemav==1.4.1 anthropic==0.47.2 # homeassistant.components.mcp_server -anyio==4.8.0 +anyio==4.9.0 # homeassistant.components.weatherkit apple_weatherkit==1.1.3 @@ -506,7 +506,7 @@ av==13.1.0 axis==64 # homeassistant.components.fujitsu_fglair -ayla-iot-unofficial==1.4.5 +ayla-iot-unofficial==1.4.7 # homeassistant.components.azure_event_hub azure-eventhub==5.11.1 @@ -534,7 +534,7 @@ bimmer-connected[china]==0.17.2 # homeassistant.components.eq3btsmart # homeassistant.components.esphome -bleak-esphome==2.11.0 +bleak-esphome==2.12.0 # homeassistant.components.bluetooth bleak-retry-connector==3.9.0 @@ -558,17 +558,20 @@ bluemaestro-ble==0.2.3 bluetooth-adapters==0.21.4 # homeassistant.components.bluetooth -bluetooth-auto-recovery==1.4.4 +bluetooth-auto-recovery==1.4.5 # homeassistant.components.bluetooth # homeassistant.components.ld2410_ble # homeassistant.components.led_ble # homeassistant.components.private_ble_device -bluetooth-data-tools==1.25.0 +bluetooth-data-tools==1.26.1 # homeassistant.components.bond bond-async==0.2.1 +# homeassistant.components.bosch_alarm +bosch-alarm-mode2==0.4.3 + # homeassistant.components.bosch_shc boschshcpy==0.2.91 @@ -576,7 +579,7 @@ boschshcpy==0.2.91 botocore==1.34.131 # homeassistant.components.bring -bring-api==1.0.2 +bring-api==1.1.0 # homeassistant.components.broadlink broadlink==0.19.0 @@ -640,13 +643,13 @@ datadog==0.15.0 datapoint==0.9.9 # homeassistant.components.bluetooth -dbus-fast==2.39.3 +dbus-fast==2.43.0 # homeassistant.components.debugpy debugpy==1.8.13 # homeassistant.components.ecovacs -deebot-client==12.3.1 +deebot-client==12.4.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns @@ -765,7 +768,7 @@ eternalegypt==0.0.16 eufylife-ble-client==0.1.8 # homeassistant.components.evohome -evohome-async==1.0.3 +evohome-async==1.0.4 # homeassistant.components.bryant_evolution evolutionhttp==0.0.18 @@ -821,16 +824,16 @@ freebox-api==1.2.2 fritzconnection[qr]==1.14.0 # homeassistant.components.fyta -fyta_cli==0.7.0 +fyta_cli==0.7.2 # homeassistant.components.google_translate gTTS==2.5.3 # homeassistant.components.gardena_bluetooth -gardena-bluetooth==1.5.0 +gardena-bluetooth==1.6.0 # homeassistant.components.google_assistant_sdk -gassist-text==0.0.11 +gassist-text==0.0.12 # homeassistant.components.google gcal-sync==7.0.0 @@ -880,7 +883,7 @@ goodwe==0.3.6 google-api-python-client==2.71.0 # 
homeassistant.components.google_pubsub -google-cloud-pubsub==2.28.0 +google-cloud-pubsub==2.29.0 # homeassistant.components.google_cloud google-cloud-speech==2.27.0 @@ -889,7 +892,7 @@ google-cloud-speech==2.27.0 google-cloud-texttospeech==2.17.2 # homeassistant.components.google_generative_ai_conversation -google-genai==1.1.0 +google-genai==1.7.0 # homeassistant.components.nest google-nest-sdm==7.1.4 @@ -950,7 +953,7 @@ ha-philipsjs==3.2.2 habiticalib==0.3.7 # homeassistant.components.bluetooth -habluetooth==3.25.0 +habluetooth==3.37.0 # homeassistant.components.cloud hass-nabucasa==0.94.0 @@ -959,7 +962,7 @@ hass-nabucasa==0.94.0 hassil==2.2.3 # homeassistant.components.jewish_calendar -hdate==0.11.1 +hdate[astral]==1.0.3 # homeassistant.components.here_travel_time here-routing==1.0.1 @@ -978,13 +981,13 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.68 +holidays==0.69 # homeassistant.components.frontend -home-assistant-frontend==20250306.0 +home-assistant-frontend==20250326.0 # homeassistant.components.conversation -home-assistant-intents==2025.3.5 +home-assistant-intents==2025.3.24 # homeassistant.components.homematicip_cloud homematicip==1.1.7 @@ -1010,7 +1013,8 @@ ibeacon-ble==1.2.0 # homeassistant.components.google # homeassistant.components.local_calendar # homeassistant.components.local_todo -ical==8.3.0 +# homeassistant.components.remote_calendar +ical==9.0.1 # homeassistant.components.caldav icalendar==6.1.0 @@ -1028,7 +1032,7 @@ ifaddr==0.2.0 igloohome-api==0.1.0 # homeassistant.components.imgw_pib -imgw_pib==1.0.9 +imgw_pib==1.0.10 # homeassistant.components.incomfort incomfort-client==0.6.7 @@ -1040,7 +1044,7 @@ influxdb-client==1.24.0 influxdb==5.3.1 # homeassistant.components.inkbird -inkbird-ble==0.7.1 +inkbird-ble==0.9.0 # homeassistant.components.insteon insteon-frontend-home-assistant==0.5.0 @@ -1083,7 +1087,7 @@ kegtron-ble==0.4.0 knocki==0.4.2 # homeassistant.components.knx -knx-frontend==2025.1.30.194235 +knx-frontend==2025.3.8.214559 # homeassistant.components.konnected konnected==1.2.0 @@ -1156,7 +1160,7 @@ mbddns==0.1.2 # homeassistant.components.mcp # homeassistant.components.mcp_server -mcp==1.1.2 +mcp==1.5.0 # homeassistant.components.minecraft_server mcstatus==11.1.1 @@ -1171,7 +1175,7 @@ medcom-ble==0.1.1 melnor-bluetooth==0.0.25 # homeassistant.components.meteo_france -meteofrance-api==1.3.0 +meteofrance-api==1.4.0 # homeassistant.components.mfi mficlient==0.5.0 @@ -1195,7 +1199,7 @@ minio==7.1.12 moat-ble==0.1.1 # homeassistant.components.moehlenhoff_alpha2 -moehlenhoff-alpha2==1.3.1 +moehlenhoff-alpha2==1.4.0 # homeassistant.components.monzo monzopy==1.4.2 @@ -1243,10 +1247,10 @@ nessclient==1.1.2 netmap==0.7.0.2 # homeassistant.components.nam -nettigo-air-monitor==4.0.0 +nettigo-air-monitor==4.1.0 # homeassistant.components.nexia -nexia==2.2.2 +nexia==2.4.0 # homeassistant.components.nextcloud nextcloudmonitor==1.5.1 @@ -1261,7 +1265,7 @@ nextdns==4.0.0 nhc==0.4.10 # homeassistant.components.nibe_heatpump -nibe==2.14.0 +nibe==2.17.0 # homeassistant.components.nice_go nice-go==1.0.1 @@ -1301,7 +1305,7 @@ objgraph==3.5.0 odp-amsterdam==6.0.2 # homeassistant.components.ohme -ohme==1.4.0 +ohme==1.4.1 # homeassistant.components.ollama ollama==0.4.7 @@ -1325,7 +1329,7 @@ open-garage==0.2.0 open-meteo==0.3.2 # homeassistant.components.openai_conversation -openai==1.61.0 +openai==1.68.2 # homeassistant.components.openerz openerz-api==0.3.0 @@ -1408,7 +1412,7 @@ prometheus-client==0.21.0 
psutil-home-assistant==0.0.1 # homeassistant.components.systemmonitor -psutil==6.1.1 +psutil==7.0.0 # homeassistant.components.pushbullet pushbullet.py==0.11.0 @@ -1431,6 +1435,9 @@ py-ccm15==0.0.9 # homeassistant.components.cpuspeed py-cpuinfo==9.0.0 +# homeassistant.components.pterodactyl +py-dactyl==2.0.4 + # homeassistant.components.dormakaba_dkey py-dormakaba-dkey==1.0.5 @@ -1468,7 +1475,7 @@ pyDuotecno==2024.10.1 pyElectra==1.2.4 # homeassistant.components.homee -pyHomee==1.2.7 +pyHomee==1.2.8 # homeassistant.components.rfxtrx pyRFXtrx==0.31.1 @@ -1493,7 +1500,7 @@ pyairnow==1.2.1 pyairvisual==2023.08.1 # homeassistant.components.aprilaire -pyaprilaire==0.7.7 +pyaprilaire==0.8.1 # homeassistant.components.asuswrt pyasuswrt==0.1.21 @@ -1541,13 +1548,13 @@ pycountry==24.6.1 pycsspeechtts==1.0.8 # homeassistant.components.daikin -pydaikin==2.13.8 +pydaikin==2.14.1 # homeassistant.components.deako pydeako==0.6.0 # homeassistant.components.deconz -pydeconz==118 +pydeconz==120 # homeassistant.components.dexcom pydexcom==0.2.3 @@ -1556,7 +1563,7 @@ pydexcom==0.2.3 pydiscovergy==3.0.2 # homeassistant.components.hydrawise -pydrawise==2025.2.0 +pydrawise==2025.3.0 # homeassistant.components.android_ip_webcam pydroid-ipcam==2.0.0 @@ -1625,7 +1632,7 @@ pygti==0.9.4 pyhaversion==22.8.0 # homeassistant.components.heos -pyheos==1.0.2 +pyheos==1.0.4 # homeassistant.components.hive pyhive-integration==1.0.2 @@ -1691,7 +1698,7 @@ pykrakenapi==0.1.8 pykulersky==0.5.2 # homeassistant.components.lamarzocco -pylamarzocco==1.4.7 +pylamarzocco==1.4.9 # homeassistant.components.lastfm pylast==5.1.0 @@ -1712,7 +1719,7 @@ pylitejet==0.6.3 pylitterbot==2024.0.0 # homeassistant.components.lutron_caseta -pylutron-caseta==0.23.0 +pylutron-caseta==0.24.0 # homeassistant.components.lutron pylutron==0.2.16 @@ -1795,7 +1802,7 @@ pyotgw==2.2.2 pyotp==2.8.0 # homeassistant.components.overkiz -pyoverkiz==1.16.2 +pyoverkiz==1.16.5 # homeassistant.components.onewire pyownet==0.10.0.post1 @@ -1882,7 +1889,7 @@ pysma==0.7.5 pysmappee==0.2.29 # homeassistant.components.smartthings -pysmartthings==2.7.0 +pysmartthings==3.0.0 # homeassistant.components.smarty pysmarty2==0.10.2 @@ -1894,7 +1901,7 @@ pysmhi==1.0.0 pysml==0.0.12 # homeassistant.components.smlight -pysmlight==0.2.3 +pysmlight==0.2.4 # homeassistant.components.snmp pysnmp==6.2.6 @@ -1960,7 +1967,7 @@ python-juicenet==1.1.0 python-kasa[speedups]==0.10.2 # homeassistant.components.linkplay -python-linkplay==0.1.3 +python-linkplay==0.2.1 # homeassistant.components.matter python-matter-server==7.0.0 @@ -1988,25 +1995,25 @@ python-otbr-api==2.7.0 python-overseerr==0.7.1 # homeassistant.components.picnic -python-picnic-api2==1.2.2 +python-picnic-api2==1.2.4 # homeassistant.components.rabbitair python-rabbitair==0.0.8 # homeassistant.components.roborock -python-roborock==2.11.1 +python-roborock==2.16.1 # homeassistant.components.smarttub python-smarttub==0.0.39 # homeassistant.components.snoo -python-snoo==0.6.1 +python-snoo==0.6.4 # homeassistant.components.songpal python-songpal==0.16.2 # homeassistant.components.tado -python-tado==0.18.6 +python-tado==0.18.9 # homeassistant.components.technove python-technove==2.0.0 @@ -2121,7 +2128,7 @@ renault-api==0.2.9 renson-endura-delta==1.7.2 # homeassistant.components.reolink -reolink-aio==0.12.1 +reolink-aio==0.13.0 # homeassistant.components.rflink rflink==0.0.66 @@ -2312,7 +2319,7 @@ temperusb==1.6.1 # homeassistant.components.tesla_fleet # homeassistant.components.teslemetry # homeassistant.components.tessie 
-tesla-fleet-api==0.9.12 +tesla-fleet-api==1.0.16 # homeassistant.components.powerwall tesla-powerwall==0.5.2 @@ -2321,7 +2328,7 @@ tesla-powerwall==0.5.2 tesla-wall-connector==1.0.2 # homeassistant.components.teslemetry -teslemetry-stream==0.6.10 +teslemetry-stream==0.6.12 # homeassistant.components.tessie tessie-api==0.1.1 @@ -2333,7 +2340,7 @@ thermobeacon-ble==0.8.1 thermopro-ble==0.11.0 # homeassistant.components.lg_thinq -thinqconnect==1.0.4 +thinqconnect==1.0.5 # homeassistant.components.tilt_ble tilt-ble==0.2.3 @@ -2351,7 +2358,7 @@ toonapi==0.3.0 total-connect-client==2025.1.4 # homeassistant.components.tplink_omada -tplink-omada-client==1.4.3 +tplink-omada-client==1.4.4 # homeassistant.components.transmission transmission-rpc==7.0.3 @@ -2416,7 +2423,7 @@ vallox-websocket-api==5.3.0 vehicle==2.2.2 # homeassistant.components.velbus -velbus-aio==2025.1.1 +velbus-aio==2025.3.1 # homeassistant.components.venstar venstarcolortouch==0.19 @@ -2477,7 +2484,7 @@ wiffi==1.1.2 wled==0.21.0 # homeassistant.components.wolflink -wolf-comm==0.0.19 +wolf-comm==0.0.23 # homeassistant.components.wyoming wyoming==1.5.4 @@ -2492,7 +2499,7 @@ xiaomi-ble==0.33.0 xknx==3.6.0 # homeassistant.components.knx -xknxproject==3.8.1 +xknxproject==3.8.2 # homeassistant.components.fritz # homeassistant.components.rest @@ -2517,7 +2524,7 @@ yalexs==8.10.0 yeelight==0.7.16 # homeassistant.components.yolink -yolink-api==0.4.8 +yolink-api==0.4.9 # homeassistant.components.youless youless-api==2.2.0 @@ -2526,7 +2533,7 @@ youless-api==2.2.0 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp[default]==2025.02.19 +yt-dlp[default]==2025.03.26 # homeassistant.components.zamg zamg==0.3.6 @@ -2538,10 +2545,10 @@ zeroconf==0.146.0 zeversolar==0.3.2 # homeassistant.components.zha -zha==0.0.51 +zha==0.0.54 # homeassistant.components.zwave_js -zwave-js-server-python==0.60.1 +zwave-js-server-python==0.62.0 # homeassistant.components.zwave_me zwave-me-ws==0.4.3 diff --git a/requirements_test_pre_commit.txt b/requirements_test_pre_commit.txt index 1cf9ef3fcf5..ff86915bbf3 100644 --- a/requirements_test_pre_commit.txt +++ b/requirements_test_pre_commit.txt @@ -1,5 +1,5 @@ # Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit codespell==2.4.1 -ruff==0.9.10 +ruff==0.11.0 yamllint==1.35.1 diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index fa823fa4834..1be6286d30c 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -139,7 +139,7 @@ uuid==1000000000.0.0 # these requirements are quite loose. As the entire stack has some outstanding issues, and # even newer versions seem to introduce new issues, it's useful for us to pin all these # requirements so we can directly link HA versions to these library versions. 
-anyio==4.8.0 +anyio==4.9.0 h11==0.14.0 httpcore==1.0.7 diff --git a/script/hassfest/config_flow.py b/script/hassfest/config_flow.py index f842ec61b97..1f8b7d1139b 100644 --- a/script/hassfest/config_flow.py +++ b/script/hassfest/config_flow.py @@ -95,7 +95,6 @@ def _populate_brand_integrations( integration = integrations.get(domain) if not integration or integration.integration_type in ( "entity", - "hardware", "system", ): continue @@ -171,7 +170,7 @@ def _generate_integrations( result["integration"][domain] = metadata else: # integration integration = integrations[domain] - if integration.integration_type in ("entity", "system", "hardware"): + if integration.integration_type in ("entity", "system"): continue if integration.translated_name: diff --git a/script/hassfest/dependencies.py b/script/hassfest/dependencies.py index b22027500dd..52ea79d32fe 100644 --- a/script/hassfest/dependencies.py +++ b/script/hassfest/dependencies.py @@ -173,10 +173,11 @@ IGNORE_VIOLATIONS = { "logbook", # Temporary needed for migration until 2024.10 ("conversation", "assist_pipeline"), - # The onboarding integration provides a limited backup API used during - # onboarding. The onboarding integration waits for the backup manager - # to be ready before calling any backup functionality. + # The onboarding integration provides limited backup and cloud APIs for use + # during onboarding. The onboarding integration waits for the backup manager + # and cloud to be ready before calling any backup or cloud functionality. ("onboarding", "backup"), + ("onboarding", "cloud"), } diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile index 104939c3808..21e97ac097b 100644 --- a/script/hassfest/docker/Dockerfile +++ b/script/hassfest/docker/Dockerfile @@ -14,7 +14,7 @@ WORKDIR "/github/workspace" COPY . 
/usr/src/homeassistant # Uv is only needed during build -RUN --mount=from=ghcr.io/astral-sh/uv:0.6.1,source=/uv,target=/bin/uv \ +RUN --mount=from=ghcr.io/astral-sh/uv:0.6.10,source=/uv,target=/bin/uv \ # Uv creates a lock file in /tmp --mount=type=tmpfs,target=/tmp \ # Required for PyTurboJPEG @@ -24,8 +24,8 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.6.1,source=/uv,target=/bin/uv \ --no-cache \ -c /usr/src/homeassistant/homeassistant/package_constraints.txt \ -r /usr/src/homeassistant/requirements.txt \ - stdlib-list==0.10.0 pipdeptree==2.25.0 tqdm==4.67.1 ruff==0.9.10 \ - PyTurboJPEG==1.7.5 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 hassil==2.2.3 home-assistant-intents==2025.3.5 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 + stdlib-list==0.10.0 pipdeptree==2.25.1 tqdm==4.67.1 ruff==0.11.0 \ + PyTurboJPEG==1.7.5 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 hassil==2.2.3 home-assistant-intents==2025.3.24 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 LABEL "name"="hassfest" LABEL "maintainer"="Home Assistant " diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 65e9d4ed9cc..ea6e657ec50 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -515,7 +515,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "ihc", "imgw_pib", "improv_ble", - "incomfort", "influxdb", "inkbird", "insteon", @@ -921,7 +920,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "sma", "smappee", "smart_meter_texas", - "smartthings", "smarttub", "smarty", "smhi", @@ -1088,7 +1086,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "vizio", "vlc", "vlc_telnet", - "vodafone_station", "voicerss", "voip", "volkszaehler", @@ -1579,7 +1576,6 @@ INTEGRATIONS_WITHOUT_SCALE = [ "imap", "imgw_pib", "improv_ble", - "incomfort", "influxdb", "inkbird", "insteon", @@ -1931,7 +1927,6 @@ INTEGRATIONS_WITHOUT_SCALE = [ "risco", "rituals_perfume_genie", "rmvtransport", - "roborock", "rocketchat", "roku", "romy", @@ -1961,7 +1956,6 @@ INTEGRATIONS_WITHOUT_SCALE = [ "season", "sendgrid", "sense", - "sensibo", "sensirion_ble", "sensorpro", "sensorpush", @@ -2000,7 +1994,6 @@ INTEGRATIONS_WITHOUT_SCALE = [ "sma", "smappee", "smart_meter_texas", - "smartthings", "smarttub", "smarty", "smhi", @@ -2173,7 +2166,6 @@ INTEGRATIONS_WITHOUT_SCALE = [ "vizio", "vlc", "vlc_telnet", - "vodafone_station", "voicerss", "voip", "volkszaehler", diff --git a/script/hassfest/translations.py b/script/hassfest/translations.py index c257f185f51..f4c05f504ca 100644 --- a/script/hassfest/translations.py +++ b/script/hassfest/translations.py @@ -29,6 +29,7 @@ ALLOW_NAME_TRANSLATION = { "cert_expiry", "cpuspeed", "emulated_roku", + "energenie_power_sockets", "faa_delays", "garages_amsterdam", "generic", @@ -40,6 +41,7 @@ ALLOW_NAME_TRANSLATION = { "local_ip", "local_todo", "nmap_tracker", + "remote_calendar", "rpi_power", "swiss_public_transport", "waze_travel_time", diff --git a/tests/common.py b/tests/common.py index df674d1824c..f426d2aebd2 100644 --- a/tests/common.py +++ b/tests/common.py @@ -29,6 +29,7 @@ from typing import Any, Literal, NoReturn from unittest.mock import AsyncMock, Mock, patch from aiohttp.test_utils import unused_port as get_test_instance_port # noqa: F401 +from annotatedyaml import load_yaml_dict, loader as yaml_loader import pytest from syrupy import SnapshotAssertion import voluptuous as vol @@ -109,7 +110,6 @@ from homeassistant.util.json import ( ) from homeassistant.util.signal_type import SignalType from homeassistant.util.unit_system import METRIC_SYSTEM -from 
homeassistant.util.yaml import load_yaml_dict, loader as yaml_loader from .testing_config.custom_components.test_constant_deprecation import ( import_deprecated_constant, diff --git a/tests/components/airthings_ble/test_config_flow.py b/tests/components/airthings_ble/test_config_flow.py index 314594c612f..2adc5498e7b 100644 --- a/tests/components/airthings_ble/test_config_flow.py +++ b/tests/components/airthings_ble/test_config_flow.py @@ -159,7 +159,6 @@ async def test_user_setup_replaces_ignored_device(hass: HomeAssistant) -> None: domain=DOMAIN, unique_id="cc:cc:cc:cc:cc:cc", source=SOURCE_IGNORE, - data={CONF_ADDRESS: "cc:cc:cc:cc:cc:cc"}, ) entry.add_to_hass(hass) with ( diff --git a/tests/components/ambient_network/snapshots/test_sensor.ambr b/tests/components/ambient_network/snapshots/test_sensor.ambr index 8637471cc60..ddf05c99b88 100644 --- a/tests/components/ambient_network/snapshots/test_sensor.ambr +++ b/tests/components/ambient_network/snapshots/test_sensor.ambr @@ -815,7 +815,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'config_subentry_id': , 'device_class': None, @@ -854,6 +856,7 @@ 'device_class': 'wind_direction', 'friendly_name': 'Station A Wind direction', 'last_measured': HAFakeDatetime(2023, 11, 8, 12, 12, 0, 914000, tzinfo=zoneinfo.ZoneInfo(key='US/Pacific')), + 'state_class': , 'unit_of_measurement': '°', }), 'context': , @@ -1800,7 +1803,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'config_subentry_id': , 'device_class': None, @@ -1839,6 +1844,7 @@ 'device_class': 'wind_direction', 'friendly_name': 'Station C Wind direction', 'last_measured': HAFakeDatetime(2024, 6, 6, 8, 28, 3, tzinfo=zoneinfo.ZoneInfo(key='US/Pacific')), + 'state_class': , 'unit_of_measurement': '°', }), 'context': , @@ -2722,7 +2728,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'config_subentry_id': , 'device_class': None, @@ -2760,6 +2768,7 @@ 'attribution': 'Data provided by ambientnetwork.net', 'device_class': 'wind_direction', 'friendly_name': 'Station D Wind direction', + 'state_class': , 'unit_of_measurement': '°', }), 'context': , diff --git a/tests/components/anthropic/conftest.py b/tests/components/anthropic/conftest.py index f8ab098cc09..7419ea6c28f 100644 --- a/tests/components/anthropic/conftest.py +++ b/tests/components/anthropic/conftest.py @@ -5,6 +5,7 @@ from unittest.mock import patch import pytest +from homeassistant.components.anthropic import CONF_CHAT_MODEL from homeassistant.const import CONF_LLM_HASS_API from homeassistant.core import HomeAssistant from homeassistant.helpers import llm @@ -38,6 +39,21 @@ def mock_config_entry_with_assist( return mock_config_entry +@pytest.fixture +def mock_config_entry_with_extended_thinking( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> MockConfigEntry: + """Mock a config entry with assist.""" + hass.config_entries.async_update_entry( + mock_config_entry, + options={ + CONF_LLM_HASS_API: llm.LLM_API_ASSIST, + CONF_CHAT_MODEL: "claude-3-7-sonnet-latest", + }, + ) + return mock_config_entry + + @pytest.fixture async def mock_init_component( hass: HomeAssistant, mock_config_entry: MockConfigEntry diff --git a/tests/components/anthropic/snapshots/test_conversation.ambr b/tests/components/anthropic/snapshots/test_conversation.ambr index 
de414019317..c0ed986f002 100644 --- a/tests/components/anthropic/snapshots/test_conversation.ambr +++ b/tests/components/anthropic/snapshots/test_conversation.ambr @@ -1,4 +1,321 @@ # serializer version: 1 +# name: test_extended_thinking_tool_call + list([ + dict({ + 'content': ''' + Current time is 16:00:00. Today's date is 2024-06-03. + You are a voice assistant for Home Assistant. + Answer questions about the world truthfully. + Answer in plain text. Keep it simple and to the point. + Only if the user wants to control a device, tell them to expose entities to their voice assistant in Home Assistant. + ''', + 'role': 'system', + }), + dict({ + 'content': 'Please call the test function', + 'role': 'user', + }), + dict({ + 'agent_id': 'conversation.claude', + 'content': 'Certainly, calling it now!', + 'role': 'assistant', + 'tool_calls': list([ + dict({ + 'id': 'toolu_0123456789AbCdEfGhIjKlM', + 'tool_args': dict({ + 'param1': 'test_value', + }), + 'tool_name': 'test_tool', + }), + ]), + }), + dict({ + 'agent_id': 'conversation.claude', + 'role': 'tool_result', + 'tool_call_id': 'toolu_0123456789AbCdEfGhIjKlM', + 'tool_name': 'test_tool', + 'tool_result': 'Test response', + }), + dict({ + 'agent_id': 'conversation.claude', + 'content': 'I have successfully called the function', + 'role': 'assistant', + 'tool_calls': None, + }), + ]) +# --- +# name: test_extended_thinking_tool_call.1 + list([ + dict({ + 'content': 'Please call the test function', + 'role': 'user', + }), + dict({ + 'content': list([ + dict({ + 'signature': 'ErUBCkYIARgCIkCYXaVNJShe3A86Hp7XUzh9YsCYBbJTbQsrklTAPtJ2sP/NoB6tSzpK/nTL6CjSo2R6n0KNBIg5MH6asM2R/kmaEgyB/X1FtZq5OQAC7jUaDEPWCdcwGQ4RaBy5wiIwmRxExIlDhoY6tILoVPnOExkC/0igZxHEwxK8RU/fmw0b+o+TwAarzUitwzbo21E5Kh3pa3I6yqVROf1t2F8rFocNUeCegsWV/ytwYV+ayA==', + 'thinking': 'The user asked me to call a test function.Is it a test? What would the function do? Would it violate any privacy or security policies?', + 'type': 'thinking', + }), + dict({ + 'data': 'EroBCkYIARgCKkBJDytPJhw//4vy3t7aE+LfIkxvkAh51cBPrAvBCo6AjgI57Zt9KWPnUVV50OQJ0KZzUFoGZG5sxg95zx4qMwkoEgz43Su3myJKckvj03waDBZLIBSeoAeRUeVsJCIwQ5edQN0sa+HNeB/KUBkoMUwV+IT0eIhcpFxnILdvxUAKM4R1o4KG3x+yO0eo/kyOKiKfrCPFQhvBVmTZPFhgA2Ow8L9gGDVipcz6x3Uu9YETGEny', + 'type': 'redacted_thinking', + }), + dict({ + 'signature': 'ErUBCkYIARgCIkCYXaVNJShe3A86Hp7XUzh9YsCYBbJTbQsrklTAPtJ2sP/NoB6tSzpK/nTL6CjSo2R6n0KNBIg5MH6asM2R/kmaEgyB/X1FtZq5OQAC7jUaDEPWCdcwGQ4RaBy5wiIwmRxExIlDhoY6tILoVPnOExkC/0igZxHEwxK8RU/fmw0b+o+TwAarzUitwzbo21E5Kh3pa3I6yqVROf1t2F8rFocNUeCegsWV/ytwYV+ayA==', + 'thinking': "Okay, let's give it a shot. 
Will I pass the test?", + 'type': 'thinking', + }), + dict({ + 'text': 'Certainly, calling it now!', + 'type': 'text', + }), + dict({ + 'id': 'toolu_0123456789AbCdEfGhIjKlM', + 'input': dict({ + 'param1': 'test_value', + }), + 'name': 'test_tool', + 'type': 'tool_use', + }), + ]), + 'role': 'assistant', + }), + dict({ + 'content': list([ + dict({ + 'content': '"Test response"', + 'tool_use_id': 'toolu_0123456789AbCdEfGhIjKlM', + 'type': 'tool_result', + }), + ]), + 'role': 'user', + }), + dict({ + 'content': list([ + dict({ + 'text': 'I have successfully called the function', + 'type': 'text', + }), + ]), + 'role': 'assistant', + }), + ]) +# --- +# name: test_history_conversion[content0] + list([ + dict({ + 'content': 'Are you sure?', + 'role': 'user', + }), + dict({ + 'content': list([ + dict({ + 'text': 'Yes, I am sure!', + 'type': 'text', + }), + ]), + 'role': 'assistant', + }), + ]) +# --- +# name: test_history_conversion[content1] + list([ + dict({ + 'content': 'What shape is a donut?', + 'role': 'user', + }), + dict({ + 'content': list([ + dict({ + 'text': 'A donut is a torus.', + 'type': 'text', + }), + ]), + 'role': 'assistant', + }), + dict({ + 'content': 'Are you sure?', + 'role': 'user', + }), + dict({ + 'content': list([ + dict({ + 'text': 'Yes, I am sure!', + 'type': 'text', + }), + ]), + 'role': 'assistant', + }), + ]) +# --- +# name: test_history_conversion[content2] + list([ + dict({ + 'content': list([ + dict({ + 'text': 'What shape is a donut?', + 'type': 'text', + }), + dict({ + 'text': 'Can you tell me?', + 'type': 'text', + }), + ]), + 'role': 'user', + }), + dict({ + 'content': list([ + dict({ + 'text': 'A donut is a torus.', + 'type': 'text', + }), + dict({ + 'text': 'Hope this helps.', + 'type': 'text', + }), + ]), + 'role': 'assistant', + }), + dict({ + 'content': 'Are you sure?', + 'role': 'user', + }), + dict({ + 'content': list([ + dict({ + 'text': 'Yes, I am sure!', + 'type': 'text', + }), + ]), + 'role': 'assistant', + }), + ]) +# --- +# name: test_history_conversion[content3] + list([ + dict({ + 'content': list([ + dict({ + 'text': 'What shape is a donut?', + 'type': 'text', + }), + dict({ + 'text': 'Can you tell me?', + 'type': 'text', + }), + dict({ + 'text': 'Please?', + 'type': 'text', + }), + ]), + 'role': 'user', + }), + dict({ + 'content': list([ + dict({ + 'text': 'A donut is a torus.', + 'type': 'text', + }), + dict({ + 'text': 'Hope this helps.', + 'type': 'text', + }), + dict({ + 'text': 'You are welcome.', + 'type': 'text', + }), + ]), + 'role': 'assistant', + }), + dict({ + 'content': 'Are you sure?', + 'role': 'user', + }), + dict({ + 'content': list([ + dict({ + 'text': 'Yes, I am sure!', + 'type': 'text', + }), + ]), + 'role': 'assistant', + }), + ]) +# --- +# name: test_history_conversion[content4] + list([ + dict({ + 'content': 'Turn off the lights and make me coffee', + 'role': 'user', + }), + dict({ + 'content': list([ + dict({ + 'text': 'Sure.', + 'type': 'text', + }), + dict({ + 'id': 'mock-tool-call-id', + 'input': dict({ + 'domain': 'light', + }), + 'name': 'HassTurnOff', + 'type': 'tool_use', + }), + dict({ + 'id': 'mock-tool-call-id-2', + 'input': dict({ + }), + 'name': 'MakeCoffee', + 'type': 'tool_use', + }), + ]), + 'role': 'assistant', + }), + dict({ + 'content': list([ + dict({ + 'text': 'Thank you', + 'type': 'text', + }), + dict({ + 'content': '{"success": true, "response": "Lights are off."}', + 'tool_use_id': 'mock-tool-call-id', + 'type': 'tool_result', + }), + dict({ + 'content': '{"success": false, "response": "Not enough 
milk."}', + 'tool_use_id': 'mock-tool-call-id-2', + 'type': 'tool_result', + }), + ]), + 'role': 'user', + }), + dict({ + 'content': list([ + dict({ + 'text': 'Should I add milk to the shopping list?', + 'type': 'text', + }), + ]), + 'role': 'assistant', + }), + dict({ + 'content': 'Are you sure?', + 'role': 'user', + }), + dict({ + 'content': list([ + dict({ + 'text': 'Yes, I am sure!', + 'type': 'text', + }), + ]), + 'role': 'assistant', + }), + ]) +# --- # name: test_unknown_hass_api dict({ 'continue_conversation': False, diff --git a/tests/components/anthropic/test_config_flow.py b/tests/components/anthropic/test_config_flow.py index 5973d9a3ee8..30aba6e1b1f 100644 --- a/tests/components/anthropic/test_config_flow.py +++ b/tests/components/anthropic/test_config_flow.py @@ -21,9 +21,11 @@ from homeassistant.components.anthropic.const import ( CONF_PROMPT, CONF_RECOMMENDED, CONF_TEMPERATURE, + CONF_THINKING_BUDGET, DOMAIN, RECOMMENDED_CHAT_MODEL, RECOMMENDED_MAX_TOKENS, + RECOMMENDED_THINKING_BUDGET, ) from homeassistant.const import CONF_LLM_HASS_API from homeassistant.core import HomeAssistant @@ -94,6 +96,28 @@ async def test_options( assert options["data"][CONF_CHAT_MODEL] == RECOMMENDED_CHAT_MODEL +async def test_options_thinking_budget_more_than_max( + hass: HomeAssistant, mock_config_entry, mock_init_component +) -> None: + """Test error about thinking budget being more than max tokens.""" + options_flow = await hass.config_entries.options.async_init( + mock_config_entry.entry_id + ) + options = await hass.config_entries.options.async_configure( + options_flow["flow_id"], + { + "prompt": "Speak like a pirate", + "max_tokens": 8192, + "chat_model": "claude-3-7-sonnet-latest", + "temperature": 1, + "thinking_budget": 16384, + }, + ) + await hass.async_block_till_done() + assert options["type"] is FlowResultType.FORM + assert options["errors"] == {"thinking_budget": "thinking_budget_too_large"} + + @pytest.mark.parametrize( ("side_effect", "error"), [ @@ -186,6 +210,7 @@ async def test_form_invalid_auth(hass: HomeAssistant, side_effect, error) -> Non CONF_TEMPERATURE: 0.3, CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL, CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS, + CONF_THINKING_BUDGET: RECOMMENDED_THINKING_BUDGET, }, ), ( @@ -195,6 +220,7 @@ async def test_form_invalid_auth(hass: HomeAssistant, side_effect, error) -> Non CONF_TEMPERATURE: 0.3, CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL, CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS, + CONF_THINKING_BUDGET: RECOMMENDED_THINKING_BUDGET, }, { CONF_RECOMMENDED: True, diff --git a/tests/components/anthropic/test_conversation.py b/tests/components/anthropic/test_conversation.py index 6c8244a59ba..67a4434a664 100644 --- a/tests/components/anthropic/test_conversation.py +++ b/tests/components/anthropic/test_conversation.py @@ -14,13 +14,18 @@ from anthropic.types import ( RawMessageStartEvent, RawMessageStopEvent, RawMessageStreamEvent, + RedactedThinkingBlock, + SignatureDelta, TextBlock, TextDelta, + ThinkingBlock, + ThinkingDelta, ToolUseBlock, Usage, ) from freezegun import freeze_time from httpx import URL, Request, Response +import pytest from syrupy.assertion import SnapshotAssertion import voluptuous as vol @@ -28,7 +33,7 @@ from homeassistant.components import conversation from homeassistant.const import CONF_LLM_HASS_API from homeassistant.core import Context, HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import intent, llm +from homeassistant.helpers import chat_session, intent, llm from 
homeassistant.setup import async_setup_component from homeassistant.util import ulid as ulid_util @@ -86,6 +91,57 @@ def create_content_block( ] +def create_thinking_block( + index: int, thinking_parts: list[str] +) -> list[RawMessageStreamEvent]: + """Create a thinking block with the specified deltas.""" + return [ + RawContentBlockStartEvent( + type="content_block_start", + content_block=ThinkingBlock(signature="", thinking="", type="thinking"), + index=index, + ), + *[ + RawContentBlockDeltaEvent( + delta=ThinkingDelta(thinking=thinking_part, type="thinking_delta"), + index=index, + type="content_block_delta", + ) + for thinking_part in thinking_parts + ], + RawContentBlockDeltaEvent( + delta=SignatureDelta( + signature="ErUBCkYIARgCIkCYXaVNJShe3A86Hp7XUzh9YsCYBbJTbQsrklTAPtJ2sP/N" + "oB6tSzpK/nTL6CjSo2R6n0KNBIg5MH6asM2R/kmaEgyB/X1FtZq5OQAC7jUaDEPWCdcwGQ" + "4RaBy5wiIwmRxExIlDhoY6tILoVPnOExkC/0igZxHEwxK8RU/fmw0b+o+TwAarzUitwzbo" + "21E5Kh3pa3I6yqVROf1t2F8rFocNUeCegsWV/ytwYV+ayA==", + type="signature_delta", + ), + index=index, + type="content_block_delta", + ), + RawContentBlockStopEvent(index=index, type="content_block_stop"), + ] + + +def create_redacted_thinking_block(index: int) -> list[RawMessageStreamEvent]: + """Create a redacted thinking block.""" + return [ + RawContentBlockStartEvent( + type="content_block_start", + content_block=RedactedThinkingBlock( + data="EroBCkYIARgCKkBJDytPJhw//4vy3t7aE+LfIkxvkAh51cBPrAvBCo6AjgI57Zt9K" + "WPnUVV50OQJ0KZzUFoGZG5sxg95zx4qMwkoEgz43Su3myJKckvj03waDBZLIBSeoAeRUeV" + "sJCIwQ5edQN0sa+HNeB/KUBkoMUwV+IT0eIhcpFxnILdvxUAKM4R1o4KG3x+yO0eo/kyOK" + "iKfrCPFQhvBVmTZPFhgA2Ow8L9gGDVipcz6x3Uu9YETGEny", + type="redacted_thinking", + ), + index=index, + ), + RawContentBlockStopEvent(index=index, type="content_block_stop"), + ] + + def create_tool_use_block( index: int, tool_id: str, tool_name: str, json_parts: list[str] ) -> list[RawMessageStreamEvent]: @@ -381,7 +437,7 @@ async def test_function_exception( return stream_generator( create_messages( [ - *create_content_block(0, "Certainly, calling it now!"), + *create_content_block(0, ["Certainly, calling it now!"]), *create_tool_use_block( 1, "toolu_0123456789AbCdEfGhIjKlM", @@ -464,7 +520,7 @@ async def test_assist_api_tools_conversion( new_callable=AsyncMock, return_value=stream_generator( create_messages( - create_content_block(0, "Hello, how can I help you?"), + create_content_block(0, ["Hello, how can I help you?"]), ), ), ) as mock_create: @@ -509,7 +565,7 @@ async def test_conversation_id( def create_stream_generator(*args, **kwargs) -> Any: return stream_generator( create_messages( - create_content_block(0, "Hello, how can I help you?"), + create_content_block(0, ["Hello, how can I help you?"]), ), ) @@ -547,3 +603,283 @@ async def test_conversation_id( ) assert result.conversation_id == "koala" + + +async def test_extended_thinking( + hass: HomeAssistant, + mock_config_entry_with_extended_thinking: MockConfigEntry, + mock_init_component, +) -> None: + """Test extended thinking support.""" + with patch( + "anthropic.resources.messages.AsyncMessages.create", + new_callable=AsyncMock, + return_value=stream_generator( + create_messages( + [ + *create_thinking_block( + 0, + [ + "The user has just", + ' greeted me with "Hi".', + " This is a simple greeting an", + "d doesn't require any Home Assistant function", + " calls. 
I should respond with", + " a friendly greeting and let them know I'm available", + " to help with their smart home.", + ], + ), + *create_content_block(1, ["Hello, how can I help you today?"]), + ] + ), + ), + ): + result = await conversation.async_converse( + hass, "hello", None, Context(), agent_id="conversation.claude" + ) + + chat_log = hass.data.get(conversation.chat_log.DATA_CHAT_LOGS).get( + result.conversation_id + ) + assert len(chat_log.content) == 3 + assert chat_log.content[1].content == "hello" + assert chat_log.content[2].content == "Hello, how can I help you today?" + + +async def test_redacted_thinking( + hass: HomeAssistant, + mock_config_entry_with_extended_thinking: MockConfigEntry, + mock_init_component, +) -> None: + """Test extended thinking with redacted thinking blocks.""" + with patch( + "anthropic.resources.messages.AsyncMessages.create", + new_callable=AsyncMock, + return_value=stream_generator( + create_messages( + [ + *create_redacted_thinking_block(0), + *create_redacted_thinking_block(1), + *create_redacted_thinking_block(2), + *create_content_block(3, ["How can I help you today?"]), + ] + ), + ), + ): + result = await conversation.async_converse( + hass, + "ANTHROPIC_MAGIC_STRING_TRIGGER_REDACTED_THINKING_46C9A13E193C177646C7398A9" + "8432ECCCE4C1253D5E2D82641AC0E52CC2876CB", + None, + Context(), + agent_id="conversation.claude", + ) + + chat_log = hass.data.get(conversation.chat_log.DATA_CHAT_LOGS).get( + result.conversation_id + ) + assert len(chat_log.content) == 3 + assert chat_log.content[2].content == "How can I help you today?" + + +@patch("homeassistant.components.anthropic.conversation.llm.AssistAPI._async_get_tools") +async def test_extended_thinking_tool_call( + mock_get_tools, + hass: HomeAssistant, + mock_config_entry_with_extended_thinking: MockConfigEntry, + mock_init_component, + snapshot: SnapshotAssertion, +) -> None: + """Test that thinking blocks and their order are preserved in with tool calls.""" + agent_id = "conversation.claude" + context = Context() + + mock_tool = AsyncMock() + mock_tool.name = "test_tool" + mock_tool.description = "Test function" + mock_tool.parameters = vol.Schema( + {vol.Optional("param1", description="Test parameters"): str} + ) + mock_tool.async_call.return_value = "Test response" + + mock_get_tools.return_value = [mock_tool] + + def completion_result(*args, messages, **kwargs): + for message in messages: + for content in message["content"]: + if not isinstance(content, str) and content["type"] == "tool_use": + return stream_generator( + create_messages( + create_content_block( + 0, ["I have ", "successfully called ", "the function"] + ), + ) + ) + + return stream_generator( + create_messages( + [ + *create_thinking_block( + 0, + [ + "The user asked me to", + " call a test function.", + "Is it a test? What", + " would the function", + " do? 
Would it violate", + " any privacy or security", + " policies?", + ], + ), + *create_redacted_thinking_block(1), + *create_thinking_block( + 2, ["Okay, let's give it a shot.", " Will I pass the test?"] + ), + *create_content_block(3, ["Certainly, calling it now!"]), + *create_tool_use_block( + 1, + "toolu_0123456789AbCdEfGhIjKlM", + "test_tool", + ['{"para', 'm1": "test_valu', 'e"}'], + ), + ] + ) + ) + + with ( + patch( + "anthropic.resources.messages.AsyncMessages.create", + new_callable=AsyncMock, + side_effect=completion_result, + ) as mock_create, + freeze_time("2024-06-03 23:00:00"), + ): + result = await conversation.async_converse( + hass, + "Please call the test function", + None, + context, + agent_id=agent_id, + ) + + chat_log = hass.data.get(conversation.chat_log.DATA_CHAT_LOGS).get( + result.conversation_id + ) + + assert chat_log.content == snapshot + assert mock_create.mock_calls[1][2]["messages"] == snapshot + + +@pytest.mark.parametrize( + "content", + [ + [ + conversation.chat_log.SystemContent("You are a helpful assistant."), + ], + [ + conversation.chat_log.SystemContent("You are a helpful assistant."), + conversation.chat_log.UserContent("What shape is a donut?"), + conversation.chat_log.AssistantContent( + agent_id="conversation.claude", content="A donut is a torus." + ), + ], + [ + conversation.chat_log.SystemContent("You are a helpful assistant."), + conversation.chat_log.UserContent("What shape is a donut?"), + conversation.chat_log.UserContent("Can you tell me?"), + conversation.chat_log.AssistantContent( + agent_id="conversation.claude", content="A donut is a torus." + ), + conversation.chat_log.AssistantContent( + agent_id="conversation.claude", content="Hope this helps." + ), + ], + [ + conversation.chat_log.SystemContent("You are a helpful assistant."), + conversation.chat_log.UserContent("What shape is a donut?"), + conversation.chat_log.UserContent("Can you tell me?"), + conversation.chat_log.UserContent("Please?"), + conversation.chat_log.AssistantContent( + agent_id="conversation.claude", content="A donut is a torus." + ), + conversation.chat_log.AssistantContent( + agent_id="conversation.claude", content="Hope this helps." + ), + conversation.chat_log.AssistantContent( + agent_id="conversation.claude", content="You are welcome." 
+ ), + ], + [ + conversation.chat_log.SystemContent("You are a helpful assistant."), + conversation.chat_log.UserContent("Turn off the lights and make me coffee"), + conversation.chat_log.AssistantContent( + agent_id="conversation.claude", + content="Sure.", + tool_calls=[ + llm.ToolInput( + id="mock-tool-call-id", + tool_name="HassTurnOff", + tool_args={"domain": "light"}, + ), + llm.ToolInput( + id="mock-tool-call-id-2", + tool_name="MakeCoffee", + tool_args={}, + ), + ], + ), + conversation.chat_log.UserContent("Thank you"), + conversation.chat_log.ToolResultContent( + agent_id="conversation.claude", + tool_call_id="mock-tool-call-id", + tool_name="HassTurnOff", + tool_result={"success": True, "response": "Lights are off."}, + ), + conversation.chat_log.ToolResultContent( + agent_id="conversation.claude", + tool_call_id="mock-tool-call-id-2", + tool_name="MakeCoffee", + tool_result={"success": False, "response": "Not enough milk."}, + ), + conversation.chat_log.AssistantContent( + agent_id="conversation.claude", + content="Should I add milk to the shopping list?", + ), + ], + ], +) +async def test_history_conversion( + hass: HomeAssistant, + mock_config_entry_with_assist: MockConfigEntry, + mock_init_component, + snapshot: SnapshotAssertion, + content: list[conversation.chat_log.Content], +) -> None: + """Test conversion of chat_log entries into API parameters.""" + conversation_id = "conversation_id" + with ( + chat_session.async_get_chat_session(hass, conversation_id) as session, + conversation.async_get_chat_log(hass, session) as chat_log, + patch( + "anthropic.resources.messages.AsyncMessages.create", + new_callable=AsyncMock, + return_value=stream_generator( + create_messages( + [ + *create_content_block(0, ["Yes, I am sure!"]), + ] + ), + ), + ) as mock_create, + ): + chat_log.content = content + + await conversation.async_converse( + hass, + "Are you sure?", + conversation_id, + Context(), + agent_id="conversation.claude", + ) + + assert mock_create.mock_calls[0][2]["messages"] == snapshot diff --git a/tests/components/assist_pipeline/snapshots/test_init.ambr b/tests/components/assist_pipeline/snapshots/test_init.ambr index 2375d48fcf9..f772f877d3a 100644 --- a/tests/components/assist_pipeline/snapshots/test_init.ambr +++ b/tests/components/assist_pipeline/snapshots/test_init.ambr @@ -8,6 +8,7 @@ 'pipeline': , 'tts_output': dict({ 'mime_type': 'audio/mpeg', + 'token': 'test_token.mp3', 'url': '/api/tts_proxy/test_token.mp3', }), }), @@ -85,6 +86,7 @@ 'tts_output': dict({ 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22james_earl_jones%22%7D", 'mime_type': 'audio/mpeg', + 'token': 'test_token.mp3', 'url': '/api/tts_proxy/test_token.mp3', }), }), @@ -105,6 +107,7 @@ 'pipeline': , 'tts_output': dict({ 'mime_type': 'audio/mpeg', + 'token': 'test_token.mp3', 'url': '/api/tts_proxy/test_token.mp3', }), }), @@ -182,6 +185,7 @@ 'tts_output': dict({ 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22Arnold+Schwarzenegger%22%7D", 'mime_type': 'audio/mpeg', + 'token': 'test_token.mp3', 'url': '/api/tts_proxy/test_token.mp3', }), }), @@ -202,6 +206,7 @@ 'pipeline': , 'tts_output': dict({ 'mime_type': 'audio/mpeg', + 'token': 'test_token.mp3', 'url': '/api/tts_proxy/test_token.mp3', }), }), @@ -279,6 +284,7 @@ 'tts_output': dict({ 'media_id': 
"media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22Arnold+Schwarzenegger%22%7D", 'mime_type': 'audio/mpeg', + 'token': 'test_token.mp3', 'url': '/api/tts_proxy/test_token.mp3', }), }), @@ -299,6 +305,7 @@ 'pipeline': , 'tts_output': dict({ 'mime_type': 'audio/mpeg', + 'token': 'test_token.mp3', 'url': '/api/tts_proxy/test_token.mp3', }), }), @@ -400,6 +407,7 @@ 'tts_output': dict({ 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22james_earl_jones%22%7D", 'mime_type': 'audio/mpeg', + 'token': 'test_token.mp3', 'url': '/api/tts_proxy/test_token.mp3', }), }), @@ -420,6 +428,7 @@ 'pipeline': , 'tts_output': dict({ 'mime_type': 'audio/mpeg', + 'token': 'mocked-token.mp3', 'url': '/api/tts_proxy/mocked-token.mp3', }), }), @@ -620,6 +629,7 @@ 'pipeline': , 'tts_output': dict({ 'mime_type': 'audio/mpeg', + 'token': 'mocked-token.mp3', 'url': '/api/tts_proxy/mocked-token.mp3', }), }), diff --git a/tests/components/assist_pipeline/snapshots/test_websocket.ambr b/tests/components/assist_pipeline/snapshots/test_websocket.ambr index d937b5396d1..57ae0095236 100644 --- a/tests/components/assist_pipeline/snapshots/test_websocket.ambr +++ b/tests/components/assist_pipeline/snapshots/test_websocket.ambr @@ -10,6 +10,7 @@ }), 'tts_output': dict({ 'mime_type': 'audio/mpeg', + 'token': 'test_token.mp3', 'url': '/api/tts_proxy/test_token.mp3', }), }) @@ -81,6 +82,7 @@ 'tts_output': dict({ 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22james_earl_jones%22%7D", 'mime_type': 'audio/mpeg', + 'token': 'test_token.mp3', 'url': '/api/tts_proxy/test_token.mp3', }), }) @@ -99,6 +101,7 @@ }), 'tts_output': dict({ 'mime_type': 'audio/mpeg', + 'token': 'test_token.mp3', 'url': '/api/tts_proxy/test_token.mp3', }), }) @@ -170,6 +173,7 @@ 'tts_output': dict({ 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22james_earl_jones%22%7D", 'mime_type': 'audio/mpeg', + 'token': 'test_token.mp3', 'url': '/api/tts_proxy/test_token.mp3', }), }) @@ -200,6 +204,7 @@ }), 'tts_output': dict({ 'mime_type': 'audio/mpeg', + 'token': 'test_token.mp3', 'url': '/api/tts_proxy/test_token.mp3', }), }) @@ -271,6 +276,7 @@ 'tts_output': dict({ 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22james_earl_jones%22%7D", 'mime_type': 'audio/mpeg', + 'token': 'test_token.mp3', 'url': '/api/tts_proxy/test_token.mp3', }), }) @@ -289,6 +295,7 @@ }), 'tts_output': dict({ 'mime_type': 'audio/mpeg', + 'token': 'test_token.mp3', 'url': '/api/tts_proxy/test_token.mp3', }), }) @@ -382,6 +389,7 @@ 'tts_output': dict({ 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22james_earl_jones%22%7D", 'mime_type': 'audio/mpeg', + 'token': 'test_token.mp3', 'url': '/api/tts_proxy/test_token.mp3', }), }) @@ -400,6 +408,7 @@ }), 'tts_output': dict({ 'mime_type': 'audio/mpeg', + 'token': 'test_token.mp3', 'url': '/api/tts_proxy/test_token.mp3', }), }) @@ -607,6 +616,7 @@ }), 'tts_output': dict({ 'mime_type': 'audio/mpeg', + 'token': 'mocked-token.mp3', 'url': '/api/tts_proxy/mocked-token.mp3', }), }) @@ -660,6 +670,7 @@ }), 'tts_output': dict({ 'mime_type': 'audio/mpeg', + 'token': 'mocked-token.mp3', 'url': '/api/tts_proxy/mocked-token.mp3', }), }) 
@@ -675,6 +686,7 @@ }), 'tts_output': dict({ 'mime_type': 'audio/mpeg', + 'token': 'mocked-token.mp3', 'url': '/api/tts_proxy/mocked-token.mp3', }), }) @@ -690,6 +702,7 @@ }), 'tts_output': dict({ 'mime_type': 'audio/mpeg', + 'token': 'mocked-token.mp3', 'url': '/api/tts_proxy/mocked-token.mp3', }), }) @@ -705,6 +718,7 @@ }), 'tts_output': dict({ 'mime_type': 'audio/mpeg', + 'token': 'mocked-token.mp3', 'url': '/api/tts_proxy/mocked-token.mp3', }), }) @@ -720,6 +734,7 @@ }), 'tts_output': dict({ 'mime_type': 'audio/mpeg', + 'token': 'mocked-token.mp3', 'url': '/api/tts_proxy/mocked-token.mp3', }), }) @@ -853,6 +868,7 @@ }), 'tts_output': dict({ 'mime_type': 'audio/mpeg', + 'token': 'mocked-token.mp3', 'url': '/api/tts_proxy/mocked-token.mp3', }), }) @@ -868,6 +884,7 @@ }), 'tts_output': dict({ 'mime_type': 'audio/mpeg', + 'token': 'mocked-token.mp3', 'url': '/api/tts_proxy/mocked-token.mp3', }), }) @@ -924,6 +941,7 @@ }), 'tts_output': dict({ 'mime_type': 'audio/mpeg', + 'token': 'mocked-token.mp3', 'url': '/api/tts_proxy/mocked-token.mp3', }), }) @@ -939,6 +957,7 @@ }), 'tts_output': dict({ 'mime_type': 'audio/mpeg', + 'token': 'mocked-token.mp3', 'url': '/api/tts_proxy/mocked-token.mp3', }), }) @@ -998,6 +1017,7 @@ }), 'tts_output': dict({ 'mime_type': 'audio/mpeg', + 'token': 'mocked-token.mp3', 'url': '/api/tts_proxy/mocked-token.mp3', }), }) @@ -1013,6 +1033,7 @@ }), 'tts_output': dict({ 'mime_type': 'audio/mpeg', + 'token': 'mocked-token.mp3', 'url': '/api/tts_proxy/mocked-token.mp3', }), }) diff --git a/tests/components/assist_pipeline/test_pipeline.py b/tests/components/assist_pipeline/test_pipeline.py index a7f6fbf7553..d67a0fd1726 100644 --- a/tests/components/assist_pipeline/test_pipeline.py +++ b/tests/components/assist_pipeline/test_pipeline.py @@ -684,7 +684,7 @@ def test_fallback_intent_filter() -> None: entities_list=[], ) ) - is True + is False ) assert ( _async_local_fallback_intent_filter( diff --git a/tests/components/assist_satellite/test_entity.py b/tests/components/assist_satellite/test_entity.py index 42b4adf742c..b9f6da6f96c 100644 --- a/tests/components/assist_satellite/test_entity.py +++ b/tests/components/assist_satellite/test_entity.py @@ -31,6 +31,8 @@ from homeassistant.exceptions import HomeAssistantError from . 
import ENTITY_ID from .conftest import MockAssistSatellite +from tests.components.tts.common import MockResultStream + @pytest.fixture def mock_chat_session_conversation_id() -> Generator[Mock]: @@ -186,8 +188,9 @@ async def test_new_pipeline_cancels_pipeline( {"message": "Hello"}, AssistSatelliteAnnouncement( message="Hello", - media_id="https://www.home-assistant.io/resolved.mp3", + media_id="http://10.10.10.10:8123/api/tts_proxy/test-token", original_media_id="media-source://bla", + tts_token="test-token", media_id_source="tts", ), ), @@ -200,6 +203,7 @@ async def test_new_pipeline_cancels_pipeline( message="Hello", media_id="https://www.home-assistant.io/resolved.mp3", original_media_id="media-source://given", + tts_token=None, media_id_source="media_id", ), ), @@ -209,9 +213,24 @@ async def test_new_pipeline_cancels_pipeline( message="", media_id="http://example.com/bla.mp3", original_media_id="http://example.com/bla.mp3", + tts_token=None, media_id_source="url", ), ), + ( + { + "media_id": "http://example.com/bla.mp3", + "preannounce_media_id": "http://example.com/preannounce.mp3", + }, + AssistSatelliteAnnouncement( + message="", + media_id="http://example.com/bla.mp3", + original_media_id="http://example.com/bla.mp3", + tts_token=None, + media_id_source="url", + preannounce_media_id="http://example.com/preannounce.mp3", + ), + ), ], ) async def test_announce( @@ -243,9 +262,17 @@ async def test_announce( with ( patch( - "homeassistant.components.assist_satellite.entity.tts_generate_media_source_id", + "homeassistant.components.tts.generate_media_source_id", new=tts_generate_media_source_id, ), + patch( + "homeassistant.components.tts.async_resolve_engine", + return_value="tts.cloud", + ), + patch( + "homeassistant.components.tts.async_create_stream", + return_value=MockResultStream(hass, "wav", b""), + ), patch( "homeassistant.components.media_source.async_resolve_media", return_value=PlayMedia( @@ -500,7 +527,8 @@ async def test_vad_sensitivity_entity_not_found( "Better system prompt", AssistSatelliteAnnouncement( message="Hello", - media_id="https://www.home-assistant.io/resolved.mp3", + media_id="http://10.10.10.10:8123/api/tts_proxy/test-token", + tts_token="test-token", original_media_id="media-source://generated", media_id_source="tts", ), @@ -517,6 +545,7 @@ async def test_vad_sensitivity_entity_not_found( AssistSatelliteAnnouncement( message="Hello", media_id="https://www.home-assistant.io/resolved.mp3", + tts_token=None, original_media_id="media-source://given", media_id_source="media_id", ), @@ -530,11 +559,30 @@ async def test_vad_sensitivity_entity_not_found( AssistSatelliteAnnouncement( message="", media_id="http://example.com/given.mp3", + tts_token=None, original_media_id="http://example.com/given.mp3", media_id_source="url", ), ), ), + ( + { + "start_media_id": "http://example.com/given.mp3", + "preannounce_media_id": "http://example.com/preannounce.mp3", + }, + ( + "mock-conversation-id", + None, + AssistSatelliteAnnouncement( + message="", + media_id="http://example.com/given.mp3", + tts_token=None, + original_media_id="http://example.com/given.mp3", + media_id_source="url", + preannounce_media_id="http://example.com/preannounce.mp3", + ), + ), + ), ], ) @pytest.mark.usefixtures("mock_chat_session_conversation_id") @@ -546,6 +594,13 @@ async def test_start_conversation( expected_params: tuple[str, str], ) -> None: """Test starting a conversation on a device.""" + original_start_conversation = entity.async_start_conversation + + async def 
async_start_conversation(start_announcement): + # Verify state change + assert entity.state == AssistSatelliteState.RESPONDING + await original_start_conversation(start_announcement) + await async_update_pipeline( hass, async_get_pipeline(hass), @@ -554,9 +609,17 @@ async def test_start_conversation( with ( patch( - "homeassistant.components.assist_satellite.entity.tts_generate_media_source_id", + "homeassistant.components.tts.generate_media_source_id", return_value="media-source://generated", ), + patch( + "homeassistant.components.tts.async_resolve_engine", + return_value="tts.cloud", + ), + patch( + "homeassistant.components.tts.async_create_stream", + return_value=MockResultStream(hass, "wav", b""), + ), patch( "homeassistant.components.media_source.async_resolve_media", return_value=PlayMedia( @@ -564,6 +627,7 @@ async def test_start_conversation( mime_type="audio/mp3", ), ), + patch.object(entity, "async_start_conversation", new=async_start_conversation), ): await hass.services.async_call( "assist_satellite", @@ -572,6 +636,7 @@ async def test_start_conversation( target={"entity_id": "assist_satellite.test_entity"}, blocking=True, ) + assert entity.state == AssistSatelliteState.IDLE assert entity.start_conversations[0] == expected_params diff --git a/tests/components/assist_satellite/test_intent.py b/tests/components/assist_satellite/test_intent.py index 9304229dbe3..0e531811adc 100644 --- a/tests/components/assist_satellite/test_intent.py +++ b/tests/components/assist_satellite/test_intent.py @@ -4,28 +4,28 @@ from unittest.mock import patch import pytest -from homeassistant.components.media_source import PlayMedia from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers import intent +from homeassistant.setup import async_setup_component from .conftest import TEST_DOMAIN, MockAssistSatellite +from tests.components.tts.common import MockResultStream + @pytest.fixture -def mock_tts(): +async def mock_tts(hass: HomeAssistant): """Mock TTS service.""" + assert await async_setup_component(hass, "tts", {}) with ( patch( - "homeassistant.components.assist_satellite.entity.tts_generate_media_source_id", + "homeassistant.components.tts.generate_media_source_id", return_value="media-source://bla", ), patch( - "homeassistant.components.media_source.async_resolve_media", - return_value=PlayMedia( - url="https://www.home-assistant.io/resolved.mp3", - mime_type="audio/mp3", - ), + "homeassistant.components.tts.async_create_stream", + return_value=MockResultStream(hass, "wav", b""), ), ): yield @@ -41,9 +41,13 @@ async def test_broadcast_intent( ) -> None: """Test we can invoke a broadcast intent.""" - result = await intent.async_handle( - hass, "test", intent.INTENT_BROADCAST, {"message": {"value": "Hello"}} - ) + with patch( + "homeassistant.components.tts.async_resolve_engine", + return_value="tts.cloud", + ): + result = await intent.async_handle( + hass, "test", intent.INTENT_BROADCAST, {"message": {"value": "Hello"}} + ) assert result.as_dict() == { "card": {}, @@ -71,13 +75,17 @@ async def test_broadcast_intent( assert len(entity2.announcements) == 1 assert len(entity_no_features.announcements) == 0 - result = await intent.async_handle( - hass, - "test", - intent.INTENT_BROADCAST, - {"message": {"value": "Hello"}}, - device_id=entity.device_entry.id, - ) + with patch( + "homeassistant.components.tts.async_resolve_engine", + return_value="tts.cloud", + ): + result = await intent.async_handle( + hass, + "test", + 
intent.INTENT_BROADCAST, + {"message": {"value": "Hello"}}, + device_id=entity.device_entry.id, + ) # Broadcast doesn't targets device that triggered it. assert result.as_dict() == { "card": {}, diff --git a/tests/components/azure_devops/snapshots/test_sensor.ambr b/tests/components/azure_devops/snapshots/test_sensor.ambr index 0b8f35497c6..3fe4d470a63 100644 --- a/tests/components/azure_devops/snapshots/test_sensor.ambr +++ b/tests/components/azure_devops/snapshots/test_sensor.ambr @@ -131,7 +131,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'CI latest build id', + 'original_name': 'CI latest build ID', 'platform': 'azure_devops', 'previous_unique_id': None, 'supported_features': 0, @@ -143,7 +143,7 @@ # name: test_sensors[sensor.testproject_ci_latest_build_id-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI latest build id', + 'friendly_name': 'testproject CI latest build ID', }), 'context': , 'entity_id': 'sensor.testproject_ci_latest_build_id', @@ -462,7 +462,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'CI latest build url', + 'original_name': 'CI latest build URL', 'platform': 'azure_devops', 'previous_unique_id': None, 'supported_features': 0, @@ -474,7 +474,7 @@ # name: test_sensors[sensor.testproject_ci_latest_build_url-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI latest build url', + 'friendly_name': 'testproject CI latest build URL', }), 'context': , 'entity_id': 'sensor.testproject_ci_latest_build_url', @@ -526,7 +526,7 @@ # name: test_sensors_missing_data[sensor.testproject_ci_latest_build_id-state-missing-data] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI latest build id', + 'friendly_name': 'testproject CI latest build ID', }), 'context': , 'entity_id': 'sensor.testproject_ci_latest_build_id', @@ -619,7 +619,7 @@ # name: test_sensors_missing_data[sensor.testproject_ci_latest_build_url-state-missing-data] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI latest build url', + 'friendly_name': 'testproject CI latest build URL', }), 'context': , 'entity_id': 'sensor.testproject_ci_latest_build_url', diff --git a/tests/components/backup/common.py b/tests/components/backup/common.py index e6e4b2f8a50..3197cbfadeb 100644 --- a/tests/components/backup/common.py +++ b/tests/components/backup/common.py @@ -2,9 +2,9 @@ from __future__ import annotations -from collections.abc import AsyncIterator, Callable, Coroutine, Iterable +from collections.abc import AsyncIterator, Buffer, Callable, Coroutine, Iterable from pathlib import Path -from typing import Any +from typing import Any, cast from unittest.mock import AsyncMock, Mock, patch from homeassistant.components.backup import ( @@ -16,6 +16,7 @@ from homeassistant.components.backup import ( BackupNotFound, Folder, ) +from homeassistant.components.backup.backup import CoreLocalBackupAgent from homeassistant.components.backup.const import DATA_MANAGER from homeassistant.core import HomeAssistant from homeassistant.helpers.backup import async_initialize_backup @@ -69,7 +70,7 @@ def mock_backup_agent(name: str, backups: list[AgentBackup] | None = None) -> Mo async def delete_backup(backup_id: str, **kwargs: Any) -> None: """Mock delete.""" - get_backup(backup_id) + await get_backup(backup_id) async def download_backup(backup_id: str, **kwargs: Any) -> AsyncIterator[bytes]: """Mock download.""" @@ -77,7 +78,7 @@ def 
mock_backup_agent(name: str, backups: list[AgentBackup] | None = None) -> Mo async def get_backup(backup_id: str, **kwargs: Any) -> AgentBackup: """Get a backup.""" - backup = next((b for b in backups if b.backup_id == backup_id), None) + backup = next((b for b in _backups if b.backup_id == backup_id), None) if backup is None: raise BackupNotFound return backup @@ -89,15 +90,15 @@ def mock_backup_agent(name: str, backups: list[AgentBackup] | None = None) -> Mo **kwargs: Any, ) -> None: """Upload a backup.""" - backups.append(backup) + _backups.append(backup) backup_stream = await open_stream() backup_data = bytearray() async for chunk in backup_stream: backup_data += chunk backups_data[backup.backup_id] = backup_data - backups = backups or [] - backups_data: dict[str, bytes] = {} + _backups = backups or [] + backups_data: dict[str, Buffer] = {} mock_agent = Mock(spec=BackupAgent) mock_agent.domain = TEST_DOMAIN mock_agent.name = name @@ -113,7 +114,7 @@ def mock_backup_agent(name: str, backups: list[AgentBackup] | None = None) -> Mo side_effect=get_backup, spec_set=[BackupAgent.async_get_backup] ) mock_agent.async_list_backups = AsyncMock( - return_value=backups, spec_set=[BackupAgent.async_list_backups] + return_value=_backups, spec_set=[BackupAgent.async_list_backups] ) mock_agent.async_upload_backup = AsyncMock( side_effect=upload_backup, @@ -160,11 +161,18 @@ async def setup_backup_integration( if LOCAL_AGENT_ID not in backups or with_hassio: return remote_agents_dict - agent = hass.data[DATA_MANAGER].backup_agents[LOCAL_AGENT_ID] + local_agent = cast( + CoreLocalBackupAgent, hass.data[DATA_MANAGER].backup_agents[LOCAL_AGENT_ID] + ) for backup in backups[LOCAL_AGENT_ID]: - await agent.async_upload_backup(open_stream=None, backup=backup) - agent._loaded_backups = True + await local_agent.async_upload_backup( + open_stream=AsyncMock( + side_effect=RuntimeError("Local agent does not open stream") + ), + backup=backup, + ) + local_agent._loaded_backups = True return remote_agents_dict diff --git a/tests/components/backup/conftest.py b/tests/components/backup/conftest.py index eb38399eb79..d391df44475 100644 --- a/tests/components/backup/conftest.py +++ b/tests/components/backup/conftest.py @@ -61,24 +61,49 @@ def path_glob_fixture(hass: HomeAssistant) -> Generator[MagicMock]: CONFIG_DIR = { - "testing_config": [ + "tests/testing_config": [ Path("test.txt"), Path(".DS_Store"), Path(".storage"), + Path("another_subdir"), Path("backups"), Path("tmp_backups"), + Path("tts"), Path("home-assistant_v2.db"), ], - "backups": [ + "/backups": [ Path("backups/backup.tar"), Path("backups/not_backup"), ], - "tmp_backups": [ + "/another_subdir": [ + Path("another_subdir/.DS_Store"), + Path("another_subdir/backups"), + Path("another_subdir/tts"), + ], + "another_subdir/backups": [ + Path("another_subdir/backups/backup.tar"), + Path("another_subdir/backups/not_backup"), + ], + "another_subdir/tts": [ + Path("another_subdir/tts/voice.mp3"), + ], + "/tmp_backups": [ # noqa: S108 Path("tmp_backups/forgotten_backup.tar"), Path("tmp_backups/not_backup"), ], + "/tts": [ + Path("tts/voice.mp3"), + ], +} +CONFIG_DIR_DIRS = { + Path(".storage"), + Path("another_subdir"), + Path("another_subdir/backups"), + Path("another_subdir/tts"), + Path("backups"), + Path("tmp_backups"), + Path("tts"), } -CONFIG_DIR_DIRS = {Path(".storage"), Path("backups"), Path("tmp_backups")} @pytest.fixture(name="create_backup") @@ -105,7 +130,10 @@ def mock_backup_generation_fixture( """Mock backup generator.""" with ( - 
patch("pathlib.Path.iterdir", lambda x: CONFIG_DIR.get(x.name, [])), + patch( + "pathlib.Path.iterdir", + lambda x: CONFIG_DIR.get(f"{x.parent.name}/{x.name}", []), + ), patch("pathlib.Path.stat", return_value=MagicMock(st_size=123)), patch("pathlib.Path.is_file", lambda x: x not in CONFIG_DIR_DIRS), patch("pathlib.Path.is_dir", lambda x: x in CONFIG_DIR_DIRS), diff --git a/tests/components/backup/snapshots/test_backup.ambr b/tests/components/backup/snapshots/test_backup.ambr index 28ee9b834c1..7cbbb9ddbce 100644 --- a/tests/components/backup/snapshots/test_backup.ambr +++ b/tests/components/backup/snapshots/test_backup.ambr @@ -114,9 +114,9 @@ 'with_automatic_settings': None, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -148,9 +148,9 @@ }), 'backups': list([ ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -182,9 +182,9 @@ }), 'backups': list([ ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -216,9 +216,9 @@ }), 'backups': list([ ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -250,9 +250,9 @@ }), 'backups': list([ ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', diff --git a/tests/components/backup/snapshots/test_diagnostics.ambr b/tests/components/backup/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..cf412970204 --- /dev/null +++ b/tests/components/backup/snapshots/test_diagnostics.ambr @@ -0,0 +1,39 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'backup_agents': list([ + dict({ + 'agent_id': 'backup.local', + 'name': 'local', + }), + ]), + 'backup_config': dict({ + 'agents': dict({ + }), + 'automatic_backups_configured': False, + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'days': list([ + ]), + 'recurrence': 'never', + 'state': 'never', + 'time': None, + }), + }), + }) +# --- diff --git a/tests/components/backup/snapshots/test_sensors.ambr b/tests/components/backup/snapshots/test_sensors.ambr new file mode 100644 index 00000000000..924038ef81f --- /dev/null +++ b/tests/components/backup/snapshots/test_sensors.ambr @@ -0,0 +1,160 @@ +# serializer version: 1 +# name: test_sensors[sensor.backup_backup_manager_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'idle', + 'create_backup', + 
'blocked', + 'receive_backup', + 'restore_backup', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.backup_backup_manager_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Backup Manager State', + 'platform': 'backup', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'backup_manager_state', + 'unique_id': 'backup_manager_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.backup_backup_manager_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Backup Backup Manager State', + 'options': list([ + 'idle', + 'create_backup', + 'blocked', + 'receive_backup', + 'restore_backup', + ]), + }), + 'context': , + 'entity_id': 'sensor.backup_backup_manager_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- +# name: test_sensors[sensor.backup_last_successful_automatic_backup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.backup_last_successful_automatic_backup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last successful automatic backup', + 'platform': 'backup', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_successful_automatic_backup', + 'unique_id': 'last_successful_automatic_backup', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.backup_last_successful_automatic_backup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Backup Last successful automatic backup', + }), + 'context': , + 'entity_id': 'sensor.backup_last_successful_automatic_backup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensors[sensor.backup_next_scheduled_automatic_backup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.backup_next_scheduled_automatic_backup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Next scheduled automatic backup', + 'platform': 'backup', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'next_scheduled_automatic_backup', + 'unique_id': 'next_scheduled_automatic_backup', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.backup_next_scheduled_automatic_backup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Backup Next scheduled automatic 
backup', + }), + 'context': , + 'entity_id': 'sensor.backup_next_scheduled_automatic_backup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/backup/snapshots/test_websocket.ambr b/tests/components/backup/snapshots/test_websocket.ambr index 6ecb508d9e9..0bef632f0b4 100644 --- a/tests/components/backup/snapshots/test_websocket.ambr +++ b/tests/components/backup/snapshots/test_websocket.ambr @@ -3951,9 +3951,9 @@ }), 'backups': list([ ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -3981,9 +3981,9 @@ }), 'backups': list([ ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -4032,9 +4032,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -4062,9 +4062,9 @@ }), 'backups': list([ ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -4113,9 +4113,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -4175,9 +4175,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -4221,9 +4221,9 @@ 'with_automatic_settings': None, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -4278,9 +4278,9 @@ 'with_automatic_settings': None, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -4333,9 +4333,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -4395,9 +4395,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -4458,9 +4458,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': 
None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -4522,9 +4522,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -4584,9 +4584,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -4646,9 +4646,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -4709,9 +4709,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -4773,9 +4773,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -5350,9 +5350,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -5401,9 +5401,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -5456,9 +5456,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -5534,9 +5534,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -5586,9 +5586,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -5638,9 +5638,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -5690,9 +5690,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': 
None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', diff --git a/tests/components/backup/test_diagnostics.py b/tests/components/backup/test_diagnostics.py new file mode 100644 index 00000000000..a66b4a9a2ea --- /dev/null +++ b/tests/components/backup/test_diagnostics.py @@ -0,0 +1,26 @@ +"""Tests the diagnostics for Home Assistant Backup integration.""" + +from syrupy import SnapshotAssertion + +from homeassistant.components.backup.const import DOMAIN +from homeassistant.core import HomeAssistant + +from .common import setup_backup_integration + +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test diagnostics.""" + await setup_backup_integration(hass, with_hassio=False) + await hass.async_block_till_done(wait_background_tasks=True) + + entry = hass.config_entries.async_entries(DOMAIN)[0] + diag_data = await get_diagnostics_for_config_entry(hass, hass_client, entry) + + assert diag_data == snapshot diff --git a/tests/components/backup/test_init.py b/tests/components/backup/test_init.py index 8a0cc2b97c0..10bd2d8b97a 100644 --- a/tests/components/backup/test_init.py +++ b/tests/components/backup/test_init.py @@ -6,11 +6,13 @@ from unittest.mock import patch import pytest from homeassistant.components.backup.const import DATA_MANAGER, DOMAIN +from homeassistant.config_entries import SOURCE_SYSTEM, ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceNotFound from .common import setup_backup_integration +from tests.common import MockConfigEntry from tests.typing import WebSocketGenerator @@ -141,3 +143,17 @@ async def test_create_automatic_service( ) generate_backup.assert_called_once_with(**expected_kwargs) + + +async def test_setup_entry( + hass: HomeAssistant, +) -> None: + """Test setup backup config entry.""" + await setup_backup_integration(hass, with_hassio=False) + entry = MockConfigEntry(domain=DOMAIN, source=SOURCE_SYSTEM) + entry.add_to_hass(hass) + + with patch("homeassistant.components.backup.PLATFORMS", return_value=[]): + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert entry.state is ConfigEntryState.LOADED diff --git a/tests/components/backup/test_manager.py b/tests/components/backup/test_manager.py index e4762f35327..04072dae864 100644 --- a/tests/components/backup/test_manager.py +++ b/tests/components/backup/test_manager.py @@ -47,7 +47,8 @@ from homeassistant.components.backup.manager import ( WrittenBackup, ) from homeassistant.components.backup.util import password_to_key -from homeassistant.core import HomeAssistant +from homeassistant.const import EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STARTED +from homeassistant.core import CoreState, HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import issue_registry as ir @@ -67,10 +68,17 @@ from tests.typing import ClientSessionGenerator, WebSocketGenerator _EXPECTED_FILES = [ "test.txt", ".storage", + "another_subdir", + "another_subdir/backups", + "another_subdir/backups/backup.tar", + "another_subdir/backups/not_backup", + "another_subdir/tts", + "another_subdir/tts/voice.mp3", "backups", "backups/not_backup", "tmp_backups", "tmp_backups/not_backup", + "tts", ] 
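# Editor's aside (illustrative, not part of the change set): the snapshot and
# test churn around here tracks a single payload rename in the backup manager's
# "backup/info" response — "last_non_idle_event" is now reported as
# "last_action_event". A minimal, hypothetical helper that reads the new key
# while tolerating older payloads could look like this:

from typing import Any


def _get_last_action_event(info: dict[str, Any]) -> dict[str, Any] | None:
    """Return the last action event, falling back to the pre-rename key."""
    if "last_action_event" in info:
        return info["last_action_event"]
    return info.get("last_non_idle_event")


# Example payload shaped like the snapshots above (values left at their defaults).
_example_info = {
    "backups": [],
    "last_action_event": None,
    "last_attempted_automatic_backup": None,
    "last_completed_automatic_backup": None,
    "next_automatic_backup": None,
    "next_automatic_backup_additional": False,
    "state": "idle",
}
assert _get_last_action_event(_example_info) is None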
_EXPECTED_FILES_WITH_DATABASE = { True: [*_EXPECTED_FILES, "home-assistant_v2.db"], @@ -537,7 +545,7 @@ async def test_initiate_backup( "agent_errors": {}, "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": None, + "last_action_event": None, "next_automatic_backup": None, "next_automatic_backup_additional": False, "state": "idle", @@ -770,7 +778,7 @@ async def test_initiate_backup_with_agent_error( "agent_errors": {}, "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": None, + "last_action_event": None, "next_automatic_backup": None, "next_automatic_backup_additional": False, "state": "idle", @@ -862,7 +870,7 @@ async def test_initiate_backup_with_agent_error( "agent_errors": {}, "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": { + "last_action_event": { "manager_state": "create_backup", "reason": "upload_failed", "stage": None, @@ -1152,7 +1160,7 @@ async def test_initiate_backup_non_agent_upload_error( "agent_errors": {}, "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": None, + "last_action_event": None, "next_automatic_backup": None, "next_automatic_backup_additional": False, "state": "idle", @@ -1249,7 +1257,7 @@ async def test_initiate_backup_with_task_error( "agent_errors": {}, "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": None, + "last_action_event": None, "next_automatic_backup": None, "next_automatic_backup_additional": False, "state": "idle", @@ -1345,7 +1353,7 @@ async def test_initiate_backup_file_error_upload_to_agents( "agent_errors": {}, "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": None, + "last_action_event": None, "next_automatic_backup": None, "next_automatic_backup_additional": False, "state": "idle", @@ -1469,7 +1477,7 @@ async def test_initiate_backup_file_error_create_backup( "agent_errors": {}, "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": None, + "last_action_event": None, "next_automatic_backup": None, "next_automatic_backup_additional": False, "state": "idle", @@ -1966,7 +1974,7 @@ async def test_receive_backup_agent_error( "agent_errors": {}, "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": None, + "last_action_event": None, "next_automatic_backup": None, "next_automatic_backup_additional": False, "state": "idle", @@ -2049,7 +2057,7 @@ async def test_receive_backup_agent_error( "agent_errors": {}, "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": { + "last_action_event": { "manager_state": "receive_backup", "reason": None, "stage": None, @@ -2102,7 +2110,7 @@ async def test_receive_backup_non_agent_upload_error( "agent_errors": {}, "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": None, + "last_action_event": None, "next_automatic_backup": None, "next_automatic_backup_additional": False, "state": "idle", @@ -2214,7 +2222,7 @@ async def test_receive_backup_file_write_error( "agent_errors": {}, "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": None, + "last_action_event": None, "next_automatic_backup": None, 
"next_automatic_backup_additional": False, "state": "idle", @@ -2310,7 +2318,7 @@ async def test_receive_backup_read_tar_error( "agent_errors": {}, "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": None, + "last_action_event": None, "next_automatic_backup": None, "next_automatic_backup_additional": False, "state": "idle", @@ -2475,7 +2483,7 @@ async def test_receive_backup_file_read_error( "agent_errors": {}, "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": None, + "last_action_event": None, "next_automatic_backup": None, "next_automatic_backup_additional": False, "state": "idle", @@ -3286,7 +3294,7 @@ async def test_initiate_backup_per_agent_encryption( "agent_errors": {}, "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": None, + "last_action_event": None, "next_automatic_backup": None, "next_automatic_backup_additional": False, "state": "idle", @@ -3389,7 +3397,7 @@ async def test_initiate_backup_per_agent_encryption( @pytest.mark.parametrize( - ("restore_result", "last_non_idle_event"), + ("restore_result", "last_action_event"), [ ( {"error": None, "error_type": None, "success": True}, @@ -3415,7 +3423,7 @@ async def test_restore_progress_after_restart( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, restore_result: dict[str, Any], - last_non_idle_event: dict[str, Any], + last_action_event: dict[str, Any], ) -> None: """Test restore backup progress after restart.""" @@ -3433,7 +3441,7 @@ async def test_restore_progress_after_restart( "backups": [], "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": last_non_idle_event, + "last_action_event": last_action_event, "next_automatic_backup": None, "next_automatic_backup_additional": False, "state": "idle", @@ -3459,7 +3467,7 @@ async def test_restore_progress_after_restart_fail_to_remove( "backups": [], "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": None, + "last_action_event": None, "next_automatic_backup": None, "next_automatic_backup_additional": False, "state": "idle", @@ -3469,3 +3477,66 @@ async def test_restore_progress_after_restart_fail_to_remove( "Unexpected error deleting backup restore result file: Boom!" 
in caplog.text ) + + +async def test_manager_blocked_until_home_assistant_started( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test backup manager's state is blocked until Home Assistant has started.""" + + hass.set_state(CoreState.not_running) + + await setup_backup_integration(hass) + manager = hass.data[DATA_MANAGER] + + assert manager.state == BackupManagerState.BLOCKED + assert manager.last_action_event is None + + # Fired when Home Assistant changes to starting state + hass.bus.async_fire(EVENT_HOMEASSISTANT_START) + await hass.async_block_till_done() + await hass.async_block_till_done() + assert manager.state == BackupManagerState.BLOCKED + assert manager.last_action_event is None + + # Fired when Home Assistant changes to running state + hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) + await hass.async_block_till_done() + assert manager.state == BackupManagerState.IDLE + assert manager.last_action_event is None + + +async def test_manager_not_blocked_after_restore( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test restore backup progress after restart.""" + restore_result = {"error": None, "error_type": None, "success": True} + + hass.set_state(CoreState.not_running) + with patch( + "pathlib.Path.read_bytes", return_value=json.dumps(restore_result).encode() + ): + await setup_backup_integration(hass) + + ws_client = await hass_ws_client(hass) + await ws_client.send_json_auto_id({"type": "backup/info"}) + result = await ws_client.receive_json() + assert result["success"] is True + assert result["result"] == { + "agent_errors": {}, + "backups": [], + "last_attempted_automatic_backup": None, + "last_completed_automatic_backup": None, + "last_action_event": { + "manager_state": "restore_backup", + "reason": None, + "stage": None, + "state": "completed", + }, + "next_automatic_backup": None, + "next_automatic_backup_additional": False, + "state": "idle", + } diff --git a/tests/components/backup/test_sensors.py b/tests/components/backup/test_sensors.py new file mode 100644 index 00000000000..bee61887ea5 --- /dev/null +++ b/tests/components/backup/test_sensors.py @@ -0,0 +1,119 @@ +"""Tests for the sensors of the Backup integration.""" + +from typing import Any +from unittest.mock import AsyncMock, MagicMock, patch + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.backup import store +from homeassistant.components.backup.const import DOMAIN +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .common import setup_backup_integration + +from tests.common import async_fire_time_changed, snapshot_platform +from tests.typing import WebSocketGenerator + + +@pytest.mark.usefixtures("mock_backup_generation") +async def test_sensors( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test setup of backup sensors.""" + with patch("homeassistant.components.backup.PLATFORMS", [Platform.SENSOR]): + await setup_backup_integration(hass, with_hassio=False) + await hass.async_block_till_done(wait_background_tasks=True) + + entry = hass.config_entries.async_entries(DOMAIN)[0] + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) + + # start backup and check sensor states again + client = await 
hass_ws_client(hass) + await hass.async_block_till_done() + await client.send_json_auto_id( + {"type": "backup/generate", "agent_ids": ["backup.local"]} + ) + + assert await client.receive_json() + state = hass.states.get("sensor.backup_backup_manager_state") + assert state.state == "create_backup" + + await hass.async_block_till_done(wait_background_tasks=True) + state = hass.states.get("sensor.backup_backup_manager_state") + assert state.state == "idle" + + +async def test_sensor_updates( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + freezer: FrozenDateTimeFactory, + hass_storage: dict[str, Any], + create_backup: AsyncMock, +) -> None: + """Test update of backup sensors.""" + # Ensure created backup is already protected, + # to avoid manager creating a new EncryptedBackupStreamer + # instead of using the already mocked stream writer. + created_backup: MagicMock = create_backup.return_value[1].result().backup + created_backup.protected = True + + await hass.config.async_set_time_zone("Europe/Amsterdam") + freezer.move_to("2024-11-12T12:00:00+01:00") + storage_data = { + "backups": [], + "config": { + "agents": {}, + "automatic_backups_configured": True, + "create_backup": { + "agent_ids": ["test.remote"], + "include_addons": [], + "include_all_addons": False, + "include_database": True, + "include_folders": [], + "name": "test-name", + "password": "test-password", + }, + "retention": {"copies": None, "days": None}, + "last_attempted_automatic_backup": "2024-11-11T04:45:00+01:00", + "last_completed_automatic_backup": "2024-11-11T04:45:00+01:00", + "schedule": { + "days": [], + "recurrence": "daily", + "state": "never", + "time": "06:00", + }, + }, + } + hass_storage[DOMAIN] = { + "data": storage_data, + "key": DOMAIN, + "version": store.STORAGE_VERSION, + "minor_version": store.STORAGE_VERSION_MINOR, + } + + with patch("homeassistant.components.backup.PLATFORMS", [Platform.SENSOR]): + await setup_backup_integration( + hass, with_hassio=False, remote_agents=["test.remote"] + ) + await hass.async_block_till_done(wait_background_tasks=True) + + state = hass.states.get("sensor.backup_last_successful_automatic_backup") + assert state.state == "2024-11-11T03:45:00+00:00" + state = hass.states.get("sensor.backup_next_scheduled_automatic_backup") + assert state.state == "2024-11-13T05:00:00+00:00" + + freezer.move_to("2024-11-13T12:00:00+01:00") + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get("sensor.backup_last_successful_automatic_backup") + assert state.state == "2024-11-13T11:00:00+00:00" + state = hass.states.get("sensor.backup_next_scheduled_automatic_backup") + assert state.state == "2024-11-14T05:00:00+00:00" diff --git a/tests/components/balboa/test_climate.py b/tests/components/balboa/test_climate.py index 850184a7d71..9c23833518e 100644 --- a/tests/components/balboa/test_climate.py +++ b/tests/components/balboa/test_climate.py @@ -26,10 +26,11 @@ from homeassistant.components.climate import ( HVACAction, HVACMode, ) -from homeassistant.const import ATTR_TEMPERATURE, Platform, UnitOfTemperature +from homeassistant.const import ATTR_TEMPERATURE, Platform from homeassistant.core import HomeAssistant, State from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import entity_registry as er +from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM from . 
import client_update, init_integration @@ -97,11 +98,10 @@ async def test_spa_temperature_unit( hass: HomeAssistant, client: MagicMock, integration: MockConfigEntry ) -> None: """Test temperature unit conversions.""" - with patch.object( - hass.config.units, "temperature_unit", UnitOfTemperature.FAHRENHEIT - ): - state = await _patch_spa_settemp(hass, client, 0, 15.4) - assert state.attributes.get(ATTR_TEMPERATURE) == 15.0 + hass.config.units = US_CUSTOMARY_SYSTEM + + state = await _patch_spa_settemp(hass, client, 0, 15.4) + assert state.attributes.get(ATTR_TEMPERATURE) == 15.0 async def test_spa_hvac_modes( diff --git a/tests/components/bluesound/test_config_flow.py b/tests/components/bluesound/test_config_flow.py index d0e0f75991b..a4d5eecd744 100644 --- a/tests/components/bluesound/test_config_flow.py +++ b/tests/components/bluesound/test_config_flow.py @@ -1,5 +1,6 @@ """Test the Bluesound config flow.""" +from ipaddress import IPv4Address, IPv6Address from unittest.mock import AsyncMock from pyblu.errors import PlayerUnreachableError @@ -121,8 +122,8 @@ async def test_zeroconf_flow_success( DOMAIN, context={"source": SOURCE_ZEROCONF}, data=ZeroconfServiceInfo( - ip_address="1.1.1.1", - ip_addresses=["1.1.1.1"], + ip_address=IPv4Address("1.1.1.1"), + ip_addresses=[IPv4Address("1.1.1.1")], port=11000, hostname="player-name1111", type="_musc._tcp.local.", @@ -160,8 +161,8 @@ async def test_zeroconf_flow_cannot_connect( DOMAIN, context={"source": SOURCE_ZEROCONF}, data=ZeroconfServiceInfo( - ip_address="1.1.1.1", - ip_addresses=["1.1.1.1"], + ip_address=IPv4Address("1.1.1.1"), + ip_addresses=[IPv4Address("1.1.1.1")], port=11000, hostname="player-name1111", type="_musc._tcp.local.", @@ -187,8 +188,8 @@ async def test_zeroconf_flow_already_configured( DOMAIN, context={"source": SOURCE_ZEROCONF}, data=ZeroconfServiceInfo( - ip_address="1.1.1.2", - ip_addresses=["1.1.1.2"], + ip_address=IPv4Address("1.1.1.2"), + ip_addresses=[IPv4Address("1.1.1.2")], port=11000, hostname="player-name1112", type="_musc._tcp.local.", @@ -203,3 +204,23 @@ async def test_zeroconf_flow_already_configured( assert config_entry.data[CONF_HOST] == "1.1.1.2" player_mocks.player_data_for_already_configured.player.sync_status.assert_called_once() + + +async def test_zeroconf_flow_no_ipv4_address(hass: HomeAssistant) -> None: + """Test abort flow when no ipv4 address is found in zeroconf data.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZeroconfServiceInfo( + ip_address=IPv6Address("2001:db8::1"), + ip_addresses=[IPv6Address("2001:db8::1")], + port=11000, + hostname="player-name1112", + type="_musc._tcp.local.", + name="player-name._musc._tcp.local.", + properties={}, + ), + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "no_ipv4_address" diff --git a/tests/components/bluetooth/test_manager.py b/tests/components/bluetooth/test_manager.py index be23a536f49..48d1a38375d 100644 --- a/tests/components/bluetooth/test_manager.py +++ b/tests/components/bluetooth/test_manager.py @@ -1019,8 +1019,6 @@ async def test_goes_unavailable_dismisses_discovery_and_makes_discoverable( def clear_all_devices(self) -> None: """Clear all devices.""" - self._discovered_device_advertisement_datas.clear() - self._discovered_device_timestamps.clear() self._previous_service_info.clear() connector = ( @@ -1446,8 +1444,6 @@ async def test_bluetooth_rediscover( def clear_all_devices(self) -> None: """Clear all devices.""" - 
self._discovered_device_advertisement_datas.clear() - self._discovered_device_timestamps.clear() self._previous_service_info.clear() connector = ( @@ -1625,8 +1621,6 @@ async def test_bluetooth_rediscover_no_match( def clear_all_devices(self) -> None: """Clear all devices.""" - self._discovered_device_advertisement_datas.clear() - self._discovered_device_timestamps.clear() self._previous_service_info.clear() connector = ( diff --git a/tests/components/bosch_alarm/__init__.py b/tests/components/bosch_alarm/__init__.py new file mode 100644 index 00000000000..2b2d94cf1e5 --- /dev/null +++ b/tests/components/bosch_alarm/__init__.py @@ -0,0 +1,22 @@ +"""Tests for the Bosch Alarm component.""" + +from unittest.mock import AsyncMock + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + +async def call_observable(hass: HomeAssistant, observable: AsyncMock) -> None: + """Call the observable with the given event.""" + for callback in observable.attach.call_args_list: + callback[0][0]() + await hass.async_block_till_done() diff --git a/tests/components/bosch_alarm/conftest.py b/tests/components/bosch_alarm/conftest.py new file mode 100644 index 00000000000..45ec0072a37 --- /dev/null +++ b/tests/components/bosch_alarm/conftest.py @@ -0,0 +1,131 @@ +"""Define fixtures for Bosch Alarm tests.""" + +from collections.abc import Generator +from typing import Any +from unittest.mock import AsyncMock, patch + +from bosch_alarm_mode2.panel import Area +from bosch_alarm_mode2.utils import Observable +import pytest + +from homeassistant.components.bosch_alarm.const import ( + CONF_INSTALLER_CODE, + CONF_USER_CODE, + DOMAIN, +) +from homeassistant.const import CONF_HOST, CONF_MODEL, CONF_PASSWORD, CONF_PORT + +from tests.common import MockConfigEntry + + +@pytest.fixture( + params=[ + "solution_3000", + "amax_3000", + "b5512", + ] +) +def model(request: pytest.FixtureRequest) -> Generator[str]: + """Return every device.""" + return request.param + + +@pytest.fixture +def extra_config_entry_data( + model: str, model_name: str, config_flow_data: dict[str, Any] +) -> dict[str, Any]: + """Return extra config entry data.""" + return {CONF_MODEL: model_name} | config_flow_data + + +@pytest.fixture +def config_flow_data(model: str) -> dict[str, Any]: + """Return extra config entry data.""" + if model == "solution_3000": + return {CONF_USER_CODE: "1234"} + if model == "amax_3000": + return {CONF_INSTALLER_CODE: "1234", CONF_PASSWORD: "1234567890"} + if model == "b5512": + return {CONF_PASSWORD: "1234567890"} + pytest.fail("Invalid model") + + +@pytest.fixture +def model_name(model: str) -> str | None: + """Return extra config entry data.""" + return { + "solution_3000": "Solution 3000", + "amax_3000": "AMAX 3000", + "b5512": "B5512 (US1B)", + }.get(model) + + +@pytest.fixture +def serial_number(model: str) -> str | None: + """Return extra config entry data.""" + if model == "solution_3000": + return "1234567890" + return None + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.bosch_alarm.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def area() -> 
Generator[Area]: + """Define a mocked area.""" + mock = AsyncMock(spec=Area) + mock.name = "Area1" + mock.status_observer = AsyncMock(spec=Observable) + mock.is_triggered.return_value = False + mock.is_disarmed.return_value = True + mock.is_arming.return_value = False + mock.is_pending.return_value = False + mock.is_part_armed.return_value = False + mock.is_all_armed.return_value = False + return mock + + +@pytest.fixture +def mock_panel( + area: AsyncMock, model_name: str, serial_number: str | None +) -> Generator[AsyncMock]: + """Define a fixture to set up Bosch Alarm.""" + with ( + patch( + "homeassistant.components.bosch_alarm.Panel", autospec=True + ) as mock_panel, + patch("homeassistant.components.bosch_alarm.config_flow.Panel", new=mock_panel), + ): + client = mock_panel.return_value + client.areas = {1: area} + client.model = model_name + client.firmware_version = "1.0.0" + client.serial_number = serial_number + client.connection_status_observer = AsyncMock(spec=Observable) + yield client + + +@pytest.fixture +def mock_config_entry( + extra_config_entry_data: dict[str, Any], serial_number: str | None +) -> MockConfigEntry: + """Mock config entry for bosch alarm.""" + return MockConfigEntry( + domain=DOMAIN, + unique_id=serial_number, + entry_id="01JQ917ACKQ33HHM7YCFXYZX51", + data={ + CONF_HOST: "0.0.0.0", + CONF_PORT: 7700, + CONF_MODEL: "bosch_alarm_test_data.model", + } + | extra_config_entry_data, + ) diff --git a/tests/components/bosch_alarm/snapshots/test_alarm_control_panel.ambr b/tests/components/bosch_alarm/snapshots/test_alarm_control_panel.ambr new file mode 100644 index 00000000000..76568cef56c --- /dev/null +++ b/tests/components/bosch_alarm/snapshots/test_alarm_control_panel.ambr @@ -0,0 +1,154 @@ +# serializer version: 1 +# name: test_alarm_control_panel[amax_3000][alarm_control_panel.area1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'alarm_control_panel', + 'entity_category': None, + 'entity_id': 'alarm_control_panel.area1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'bosch_alarm', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '01JQ917ACKQ33HHM7YCFXYZX51_area_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_alarm_control_panel[amax_3000][alarm_control_panel.area1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'changed_by': None, + 'code_arm_required': False, + 'code_format': None, + 'friendly_name': 'Area1', + 'supported_features': , + }), + 'context': , + 'entity_id': 'alarm_control_panel.area1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'disarmed', + }) +# --- +# name: test_alarm_control_panel[b5512][alarm_control_panel.area1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'alarm_control_panel', + 'entity_category': None, + 'entity_id': 'alarm_control_panel.area1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'bosch_alarm', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '01JQ917ACKQ33HHM7YCFXYZX51_area_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_alarm_control_panel[b5512][alarm_control_panel.area1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'changed_by': None, + 'code_arm_required': False, + 'code_format': None, + 'friendly_name': 'Area1', + 'supported_features': , + }), + 'context': , + 'entity_id': 'alarm_control_panel.area1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'disarmed', + }) +# --- +# name: test_alarm_control_panel[solution_3000][alarm_control_panel.area1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'alarm_control_panel', + 'entity_category': None, + 'entity_id': 'alarm_control_panel.area1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'bosch_alarm', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '1234567890_area_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_alarm_control_panel[solution_3000][alarm_control_panel.area1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'changed_by': None, + 'code_arm_required': False, + 'code_format': None, + 'friendly_name': 'Area1', + 'supported_features': , + }), + 'context': , + 'entity_id': 'alarm_control_panel.area1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'disarmed', + }) +# --- diff --git a/tests/components/bosch_alarm/test_alarm_control_panel.py b/tests/components/bosch_alarm/test_alarm_control_panel.py new file mode 100644 index 00000000000..31d2f928ec5 --- /dev/null +++ b/tests/components/bosch_alarm/test_alarm_control_panel.py @@ -0,0 +1,145 @@ +"""Tests for Bosch Alarm component.""" + +from collections.abc import AsyncGenerator +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.alarm_control_panel import ( + DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, + AlarmControlPanelState, +) +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_ALARM_ARM_AWAY, + SERVICE_ALARM_ARM_HOME, + SERVICE_ALARM_DISARM, + STATE_UNAVAILABLE, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import call_observable, setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.fixture(autouse=True) +async def platforms() -> AsyncGenerator[None]: + """Return the platforms to be loaded for this test.""" + with patch( + "homeassistant.components.bosch_alarm.PLATFORMS", [Platform.ALARM_CONTROL_PANEL] + ): + yield + + +async def test_update_alarm_device( + hass: HomeAssistant, + mock_panel: AsyncMock, + area: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that alarm panel state changes after arming the panel.""" + await setup_integration(hass, mock_config_entry) + entity_id = "alarm_control_panel.area1" + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + + area.is_arming.return_value = True + area.is_disarmed.return_value = False + + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_ARM_AWAY, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + await call_observable(hass, area.status_observer) + + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMING + + area.is_arming.return_value = False + area.is_all_armed.return_value = True + + await call_observable(hass, area.status_observer) + + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY + + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_DISARM, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + area.is_all_armed.return_value = False + area.is_disarmed.return_value = True + + await call_observable(hass, area.status_observer) + + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_ARM_HOME, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + area.is_disarmed.return_value = False + area.is_arming.return_value = True + + await call_observable(hass, area.status_observer) + + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMING + + area.is_arming.return_value = False + area.is_part_armed.return_value = True + + await call_observable(hass, area.status_observer) + + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_HOME + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_DISARM, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + area.is_part_armed.return_value = False + area.is_disarmed.return_value = True + + await call_observable(hass, area.status_observer) + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + + +async def test_alarm_control_panel( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_panel: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the alarm_control_panel state.""" + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_alarm_control_panel_availability( + hass: HomeAssistant, + mock_panel: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the alarm_control_panel availability.""" + await setup_integration(hass, mock_config_entry) + + assert ( + hass.states.get("alarm_control_panel.area1").state + == AlarmControlPanelState.DISARMED + ) + + mock_panel.connection_status.return_value = False + + await call_observable(hass, mock_panel.connection_status_observer) + + assert hass.states.get("alarm_control_panel.area1").state == 
STATE_UNAVAILABLE diff --git a/tests/components/bosch_alarm/test_config_flow.py b/tests/components/bosch_alarm/test_config_flow.py new file mode 100644 index 00000000000..066b3008821 --- /dev/null +++ b/tests/components/bosch_alarm/test_config_flow.py @@ -0,0 +1,212 @@ +"""Tests for the bosch_alarm config flow.""" + +import asyncio +from typing import Any +from unittest.mock import AsyncMock + +import pytest + +from homeassistant import config_entries +from homeassistant.components.bosch_alarm.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_HOST, CONF_MODEL, CONF_PORT +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_form_user( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_panel: AsyncMock, + model_name: str, + serial_number: str, + config_flow_data: dict[str, Any], +) -> None: + """Test the config flow for bosch_alarm.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "1.1.1.1", CONF_PORT: 7700}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "auth" + assert result["errors"] == {} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + config_flow_data, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == f"Bosch {model_name}" + assert ( + result["data"] + == { + CONF_HOST: "1.1.1.1", + CONF_PORT: 7700, + CONF_MODEL: model_name, + } + | config_flow_data + ) + assert result["result"].unique_id == serial_number + assert len(mock_setup_entry.mock_calls) == 1 + + +@pytest.mark.parametrize( + ("exception", "message"), + [ + (asyncio.TimeoutError, "cannot_connect"), + (Exception, "unknown"), + ], +) +async def test_form_exceptions( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_panel: AsyncMock, + config_flow_data: dict[str, Any], + exception: Exception, + message: str, +) -> None: + """Test we handle exceptions correctly.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + mock_panel.connect.side_effect = exception + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "1.1.1.1", CONF_PORT: 7700}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": message} + + mock_panel.connect.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "1.1.1.1", CONF_PORT: 7700}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "auth" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + config_flow_data, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + +@pytest.mark.parametrize( + ("exception", "message"), + [ + (PermissionError, "invalid_auth"), + (asyncio.TimeoutError, "cannot_connect"), + (Exception, "unknown"), + ], +) +async def test_form_exceptions_user( + hass: HomeAssistant, + 
mock_setup_entry: AsyncMock, + mock_panel: AsyncMock, + config_flow_data: dict[str, Any], + exception: Exception, + message: str, +) -> None: + """Test we handle exceptions correctly.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "1.1.1.1", CONF_PORT: 7700}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "auth" + assert result["errors"] == {} + mock_panel.connect.side_effect = exception + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], config_flow_data + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "auth" + assert result["errors"] == {"base": message} + + mock_panel.connect.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], config_flow_data + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + +@pytest.mark.parametrize("model", ["solution_3000", "amax_3000"]) +async def test_entry_already_configured_host( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_panel: AsyncMock, + config_flow_data: dict[str, Any], +) -> None: + """Test if configuring an entity twice results in an error.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: "0.0.0.0"} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "auth" + assert result["errors"] == {} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], config_flow_data + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +@pytest.mark.parametrize("model", ["b5512"]) +async def test_entry_already_configured_serial( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_panel: AsyncMock, + config_flow_data: dict[str, Any], +) -> None: + """Test if configuring an entity twice results in an error.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: "0.0.0.0"} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "auth" + assert result["errors"] == {} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], config_flow_data + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/bosch_alarm/test_init.py b/tests/components/bosch_alarm/test_init.py new file mode 100644 index 00000000000..0497a91eadf --- /dev/null +++ b/tests/components/bosch_alarm/test_init.py @@ -0,0 +1,33 @@ +"""Tests for bosch alarm integration init.""" + +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +@pytest.fixture(autouse=True) +def disable_platform_only(): + """Disable platforms to speed up tests.""" + with patch("homeassistant.components.bosch_alarm.PLATFORMS", []): + yield + + +@pytest.mark.parametrize("model", ["solution_3000"]) +@pytest.mark.parametrize("exception", [PermissionError(), TimeoutError()]) +async def test_incorrect_auth( + hass: HomeAssistant, + mock_panel: AsyncMock, + mock_config_entry: MockConfigEntry, + exception: Exception, +) -> None: + """Test errors with incorrect auth.""" + mock_panel.connect.side_effect = exception + await setup_integration(hass, mock_config_entry) + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/bring/snapshots/test_diagnostics.ambr b/tests/components/bring/snapshots/test_diagnostics.ambr index 951c3d3f808..3f4c8f5f339 100644 --- a/tests/components/bring/snapshots/test_diagnostics.ambr +++ b/tests/components/bring/snapshots/test_diagnostics.ambr @@ -128,27 +128,29 @@ }), 'lst': dict({ 'listUuid': 'b4776778-7f6c-496e-951b-92a35d3db0dd', - 'name': 'Baumarkt', + 'name': '**REDACTED**', 'theme': 'ch.publisheria.bring.theme.home', }), 'users': dict({ 'users': list([ dict({ 'country': 'DE', - 'email': 'test-email', + 'email': '**REDACTED**', 'language': 'de', - 'name': 'Bring', + 'name': '**REDACTED**', 'photoPath': '', + 'plusExpiry': None, 'plusTryOut': False, 'publicUuid': '9a21fdfc-63a4-441a-afc1-ef3030605a9d', 'pushEnabled': True, }), dict({ 'country': 'US', - 'email': 'EMAIL', + 'email': '**REDACTED**', 'language': 'en', - 'name': 'NAME', + 'name': '**REDACTED**', 'photoPath': '', + 'plusExpiry': None, 'plusTryOut': False, 'publicUuid': '73af455f-c158-4004-a5e0-79f4f8a6d4bd', 'pushEnabled': True, @@ -159,6 +161,7 @@ 'language': 'en', 'name': None, 'photoPath': None, + 'plusExpiry': None, 'plusTryOut': False, 'publicUuid': '7d5e9d08-877a-4c36-8740-a9bf74ec690a', 'pushEnabled': True, @@ -292,27 +295,29 @@ }), 'lst': dict({ 'listUuid': 'e542eef6-dba7-4c31-a52c-29e6ab9d83a5', - 'name': 'Einkauf', + 'name': '**REDACTED**', 'theme': 'ch.publisheria.bring.theme.home', }), 'users': dict({ 'users': list([ dict({ 'country': 'DE', - 'email': 'test-email', + 'email': '**REDACTED**', 'language': 'de', - 'name': 'Bring', + 'name': '**REDACTED**', 'photoPath': '', + 'plusExpiry': None, 'plusTryOut': False, 'publicUuid': '9a21fdfc-63a4-441a-afc1-ef3030605a9d', 'pushEnabled': True, }), dict({ 'country': 'US', - 'email': 'EMAIL', + 'email': '**REDACTED**', 'language': 'en', - 'name': 'NAME', + 'name': '**REDACTED**', 'photoPath': '', + 'plusExpiry': None, 'plusTryOut': False, 'publicUuid': '73af455f-c158-4004-a5e0-79f4f8a6d4bd', 'pushEnabled': True, @@ -323,6 +328,7 @@ 'language': 'en', 'name': None, 'photoPath': None, + 'plusExpiry': None, 'plusTryOut': False, 'publicUuid': '7d5e9d08-877a-4c36-8740-a9bf74ec690a', 'pushEnabled': True, diff --git a/tests/components/cast/test_config_flow.py b/tests/components/cast/test_config_flow.py index 2dcf007c6d4..e02230892bf 100644 --- a/tests/components/cast/test_config_flow.py +++ b/tests/components/cast/test_config_flow.py @@ -87,7 +87,7 @@ async def test_user_setup_options(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM result = await hass.config_entries.flow.async_configure( - result["flow_id"], {"known_hosts": "192.168.0.1, , 192.168.0.2 "} + result["flow_id"], {"known_hosts": ["192.168.0.1", "", " ", "192.168.0.2 "]} ) users = await hass.auth.async_get_users() @@ 
-152,13 +152,13 @@ def get_suggested(schema, key): @pytest.mark.parametrize( - "parameter_data", + ("parameter", "initial", "suggested", "user_input", "updated"), [ ( "known_hosts", ["192.168.0.10", "192.168.0.11"], - "192.168.0.10,192.168.0.11", - "192.168.0.1, , 192.168.0.2 ", + ["192.168.0.10", "192.168.0.11"], + ["192.168.0.1", " ", " 192.168.0.2 "], ["192.168.0.1", "192.168.0.2"], ), ( @@ -177,11 +177,17 @@ def get_suggested(schema, key): ), ], ) -async def test_option_flow(hass: HomeAssistant, parameter_data) -> None: +async def test_option_flow( + hass: HomeAssistant, + parameter: str, + initial: list[str], + suggested: str | list[str], + user_input: str | list[str], + updated: list[str], +) -> None: """Test config flow options.""" basic_parameters = ["known_hosts"] advanced_parameters = ["ignore_cec", "uuid"] - parameter, initial, suggested, user_input, updated = parameter_data data = { "ignore_cec": [], @@ -213,7 +219,7 @@ async def test_option_flow(hass: HomeAssistant, parameter_data) -> None: for other_param in basic_parameters: if other_param == parameter: continue - assert get_suggested(data_schema, other_param) == "" + assert get_suggested(data_schema, other_param) == [] if parameter in basic_parameters: assert get_suggested(data_schema, parameter) == suggested @@ -261,7 +267,7 @@ async def test_option_flow(hass: HomeAssistant, parameter_data) -> None: result = await hass.config_entries.options.async_init(config_entry.entry_id) result = await hass.config_entries.options.async_configure( result["flow_id"], - user_input={"known_hosts": ""}, + user_input={}, ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == {} @@ -277,7 +283,7 @@ async def test_known_hosts(hass: HomeAssistant, castbrowser_mock) -> None: "cast", context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( - result["flow_id"], {"known_hosts": "192.168.0.1, 192.168.0.2"} + result["flow_id"], {"known_hosts": ["192.168.0.1", "192.168.0.2"]} ) assert result["type"] is FlowResultType.CREATE_ENTRY await hass.async_block_till_done(wait_background_tasks=True) @@ -290,7 +296,7 @@ async def test_known_hosts(hass: HomeAssistant, castbrowser_mock) -> None: result = await hass.config_entries.options.async_init(config_entry.entry_id) result = await hass.config_entries.options.async_configure( result["flow_id"], - user_input={"known_hosts": "192.168.0.11, 192.168.0.12"}, + user_input={"known_hosts": ["192.168.0.11", "192.168.0.12"]}, ) await hass.async_block_till_done(wait_background_tasks=True) diff --git a/tests/components/cast/test_media_player.py b/tests/components/cast/test_media_player.py index b2ce60e9393..668ed985154 100644 --- a/tests/components/cast/test_media_player.py +++ b/tests/components/cast/test_media_player.py @@ -1909,6 +1909,7 @@ async def test_group_media_control( ) +@pytest.mark.usefixtures("mock_tts_cache_dir") async def test_failed_cast_on_idle( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: @@ -1939,6 +1940,7 @@ async def test_failed_cast_on_idle( assert "Failed to cast media http://example.com:8123/tts.mp3." in caplog.text +@pytest.mark.usefixtures("mock_tts_cache_dir") async def test_failed_cast_other_url( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: @@ -1963,6 +1965,7 @@ async def test_failed_cast_other_url( assert "Failed to cast media http://example.com:8123/tts.mp3." 
in caplog.text +@pytest.mark.usefixtures("mock_tts_cache_dir") async def test_failed_cast_internal_url( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: @@ -1992,6 +1995,7 @@ async def test_failed_cast_internal_url( ) +@pytest.mark.usefixtures("mock_tts_cache_dir") async def test_failed_cast_external_url( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: diff --git a/tests/components/cloud/test_backup.py b/tests/components/cloud/test_backup.py index 5220d3eccd5..dd6252c4d62 100644 --- a/tests/components/cloud/test_backup.py +++ b/tests/components/cloud/test_backup.py @@ -208,7 +208,7 @@ async def test_agents_list_backups_fail_cloud( "backups": [], "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": None, + "last_action_event": None, "next_automatic_backup": None, "next_automatic_backup_additional": False, "state": "idle", diff --git a/tests/components/comelit/test_coordinator.py b/tests/components/comelit/test_coordinator.py new file mode 100644 index 00000000000..a8ef82a7e89 --- /dev/null +++ b/tests/components/comelit/test_coordinator.py @@ -0,0 +1,49 @@ +"""Tests for Comelit SimpleHome coordinator.""" + +from unittest.mock import AsyncMock + +from aiocomelit.exceptions import CannotAuthenticate, CannotConnect, CannotRetrieveData +from freezegun.api import FrozenDateTimeFactory +import pytest + +from homeassistant.components.comelit.const import SCAN_INTERVAL +from homeassistant.const import STATE_OFF, STATE_UNAVAILABLE +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed + + +@pytest.mark.parametrize( + "side_effect", + [ + CannotConnect, + CannotRetrieveData, + CannotAuthenticate, + ], +) +async def test_coordinator_data_update_fails( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_serial_bridge: AsyncMock, + mock_serial_bridge_config_entry: MockConfigEntry, + side_effect: Exception, +) -> None: + """Test coordinator data update exceptions.""" + + entity_id = "light.light0" + + await setup_integration(hass, mock_serial_bridge_config_entry) + + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF + + mock_serial_bridge.login.side_effect = side_effect + + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/config/test_config_entries.py b/tests/components/config/test_config_entries.py index 739b79e22bd..ce10a36c42c 100644 --- a/tests/components/config/test_config_entries.py +++ b/tests/components/config/test_config_entries.py @@ -1193,7 +1193,7 @@ async def test_subentry_reconfigure_flow(hass: HomeAssistant, client) -> None: async def async_step_reconfigure(self, user_input=None): if user_input is not None: return self.async_update_and_abort( - self._get_reconfigure_entry(), + self._get_entry(), self._get_reconfigure_subentry(), title="Test Entry", data={"test": "blah"}, diff --git a/tests/components/conftest.py b/tests/components/conftest.py index 6d6d0d4641f..e0db306cae9 100644 --- a/tests/components/conftest.py +++ b/tests/components/conftest.py @@ -929,7 +929,7 @@ async def check_translations( ignored_domains = set(ignore_translations_for_mock_domains) # Set all ignored translation keys to "unused" - translation_errors = {k: "unused" for k in ignore_missing_translations} + 
translation_errors = dict.fromkeys(ignore_missing_translations, "unused") translation_coros = set() diff --git a/tests/components/conversation/snapshots/test_http.ambr b/tests/components/conversation/snapshots/test_http.ambr index 849a5b17102..abce735dd8a 100644 --- a/tests/components/conversation/snapshots/test_http.ambr +++ b/tests/components/conversation/snapshots/test_http.ambr @@ -32,6 +32,7 @@ 'it', 'ka', 'ko', + 'kw', 'lb', 'lt', 'lv', diff --git a/tests/components/conversation/test_chat_log.py b/tests/components/conversation/test_chat_log.py index 97094740af0..d7b3531c658 100644 --- a/tests/components/conversation/test_chat_log.py +++ b/tests/components/conversation/test_chat_log.py @@ -591,7 +591,7 @@ async def test_add_delta_content_stream_errors( async_get_chat_log(hass, session, mock_conversation_input) as chat_log, ): # Stream content without LLM API set - with pytest.raises(ValueError): # noqa: PT012 + with pytest.raises(ValueError): async for _tool_result_content in chat_log.async_add_delta_content_stream( "mock-agent-id", stream( @@ -613,7 +613,7 @@ async def test_add_delta_content_stream_errors( # Non assistant role for role in "system", "user": - with pytest.raises(ValueError): # noqa: PT012 + with pytest.raises(ValueError): async for ( _tool_result_content ) in chat_log.async_add_delta_content_stream( diff --git a/tests/components/conversation/test_http.py b/tests/components/conversation/test_http.py index 6d69ec3c739..77fa97ad845 100644 --- a/tests/components/conversation/test_http.py +++ b/tests/components/conversation/test_http.py @@ -536,3 +536,60 @@ async def test_ws_hass_agent_debug_sentence_trigger( # Trigger should not have been executed assert len(calls) == 0 + + +async def test_ws_hass_language_scores( + hass: HomeAssistant, init_components, hass_ws_client: WebSocketGenerator +) -> None: + """Test getting language support scores.""" + client = await hass_ws_client(hass) + + await client.send_json_auto_id( + {"type": "conversation/agent/homeassistant/language_scores"} + ) + + msg = await client.receive_json() + assert msg["success"] + + # Sanity check + result = msg["result"] + assert result["languages"]["en-US"] == { + "cloud": 3, + "focused_local": 2, + "full_local": 3, + } + + +async def test_ws_hass_language_scores_with_filter( + hass: HomeAssistant, init_components, hass_ws_client: WebSocketGenerator +) -> None: + """Test getting language support scores with language/country filter.""" + client = await hass_ws_client(hass) + + # Language filter + await client.send_json_auto_id( + {"type": "conversation/agent/homeassistant/language_scores", "language": "de"} + ) + + msg = await client.receive_json() + assert msg["success"] + + # German should be preferred + result = msg["result"] + assert result["preferred_language"] == "de-DE" + + # Language/country filter + await client.send_json_auto_id( + { + "type": "conversation/agent/homeassistant/language_scores", + "language": "en", + "country": "GB", + } + ) + + msg = await client.receive_json() + assert msg["success"] + + # GB English should be preferred + result = msg["result"] + assert result["preferred_language"] == "en-GB" diff --git a/tests/components/conversation/test_util.py b/tests/components/conversation/test_util.py deleted file mode 100644 index 72a334232c1..00000000000 --- a/tests/components/conversation/test_util.py +++ /dev/null @@ -1,56 +0,0 @@ -"""Test the conversation utils.""" - -from homeassistant.components.conversation.util import create_matcher - - -def test_create_matcher() -> None: - 
"""Test the create matcher method.""" - # Basic sentence - pattern = create_matcher("Hello world") - assert pattern.match("Hello world") is not None - - # Match a part - pattern = create_matcher("Hello {name}") - match = pattern.match("hello world") - assert match is not None - assert match.groupdict()["name"] == "world" - no_match = pattern.match("Hello world, how are you?") - assert no_match is None - - # Optional and matching part - pattern = create_matcher("Turn on [the] {name}") - match = pattern.match("turn on the kitchen lights") - assert match is not None - assert match.groupdict()["name"] == "kitchen lights" - match = pattern.match("turn on kitchen lights") - assert match is not None - assert match.groupdict()["name"] == "kitchen lights" - match = pattern.match("turn off kitchen lights") - assert match is None - - # Two different optional parts, 1 matching part - pattern = create_matcher("Turn on [the] [a] {name}") - match = pattern.match("turn on the kitchen lights") - assert match is not None - assert match.groupdict()["name"] == "kitchen lights" - match = pattern.match("turn on kitchen lights") - assert match is not None - assert match.groupdict()["name"] == "kitchen lights" - match = pattern.match("turn on a kitchen light") - assert match is not None - assert match.groupdict()["name"] == "kitchen light" - - # Strip plural - pattern = create_matcher("Turn {name}[s] on") - match = pattern.match("turn kitchen lights on") - assert match is not None - assert match.groupdict()["name"] == "kitchen light" - - # Optional 2 words - pattern = create_matcher("Turn [the great] {name} on") - match = pattern.match("turn the great kitchen lights on") - assert match is not None - assert match.groupdict()["name"] == "kitchen lights" - match = pattern.match("turn kitchen lights on") - assert match is not None - assert match.groupdict()["name"] == "kitchen lights" diff --git a/tests/components/cover/test_reproduce_state.py b/tests/components/cover/test_reproduce_state.py index 4aad27011fa..57fc5aed5e9 100644 --- a/tests/components/cover/test_reproduce_state.py +++ b/tests/components/cover/test_reproduce_state.py @@ -7,9 +7,11 @@ from homeassistant.components.cover import ( ATTR_CURRENT_TILT_POSITION, ATTR_POSITION, ATTR_TILT_POSITION, + CoverEntityFeature, CoverState, ) from homeassistant.const import ( + ATTR_SUPPORTED_FEATURES, SERVICE_CLOSE_COVER, SERVICE_CLOSE_COVER_TILT, SERVICE_OPEN_COVER, @@ -27,35 +29,213 @@ async def test_reproducing_states( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test reproducing Cover states.""" - hass.states.async_set("cover.entity_close", CoverState.CLOSED, {}) + hass.states.async_set( + "cover.entity_close", + CoverState.CLOSED, + { + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.CLOSE | CoverEntityFeature.OPEN, + }, + ) + hass.states.async_set( + "cover.closed_only_supports_close_open", + CoverState.CLOSED, + { + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.CLOSE | CoverEntityFeature.OPEN, + }, + ) + hass.states.async_set( + "cover.open_only_supports_close_open", + CoverState.OPEN, + { + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.CLOSE | CoverEntityFeature.OPEN, + }, + ) + hass.states.async_set( + "cover.open_missing_all_features", + CoverState.OPEN, + ) + hass.states.async_set( + "cover.closed_missing_all_features_has_position", + CoverState.CLOSED, + { + ATTR_CURRENT_POSITION: 0, + }, + ) + hass.states.async_set( + "cover.open_missing_all_features_has_tilt_position", + CoverState.OPEN, + { + ATTR_CURRENT_TILT_POSITION: 50, + }, + ) + 
hass.states.async_set( + "cover.closed_only_supports_tilt_close_open", + CoverState.CLOSED, + { + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.CLOSE_TILT + | CoverEntityFeature.OPEN_TILT, + }, + ) + hass.states.async_set( + "cover.open_only_supports_tilt_close_open", + CoverState.OPEN, + { + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.CLOSE_TILT + | CoverEntityFeature.OPEN_TILT, + }, + ) + hass.states.async_set( + "cover.closed_only_supports_position", + CoverState.CLOSED, + { + ATTR_CURRENT_POSITION: 0, + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION, + }, + ) + hass.states.async_set( + "cover.open_only_supports_position", + CoverState.OPEN, + {ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION}, + ) hass.states.async_set( "cover.entity_close_attr", CoverState.CLOSED, - {ATTR_CURRENT_POSITION: 0, ATTR_CURRENT_TILT_POSITION: 0}, + { + ATTR_CURRENT_POSITION: 0, + ATTR_CURRENT_TILT_POSITION: 0, + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.CLOSE_TILT + | CoverEntityFeature.OPEN_TILT + | CoverEntityFeature.SET_POSITION + | CoverEntityFeature.SET_TILT_POSITION + | CoverEntityFeature.CLOSE + | CoverEntityFeature.OPEN, + }, ) hass.states.async_set( - "cover.entity_close_tilt", CoverState.CLOSED, {ATTR_CURRENT_TILT_POSITION: 50} + "cover.entity_close_tilt", + CoverState.CLOSED, + { + ATTR_CURRENT_TILT_POSITION: 50, + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.CLOSE_TILT + | CoverEntityFeature.OPEN_TILT + | CoverEntityFeature.SET_TILT_POSITION, + }, ) - hass.states.async_set("cover.entity_open", CoverState.OPEN, {}) hass.states.async_set( - "cover.entity_slightly_open", CoverState.OPEN, {ATTR_CURRENT_POSITION: 50} + "cover.entity_open", + CoverState.OPEN, + {ATTR_SUPPORTED_FEATURES: CoverEntityFeature.CLOSE | CoverEntityFeature.OPEN}, + ) + hass.states.async_set( + "cover.entity_slightly_open", + CoverState.OPEN, + { + ATTR_CURRENT_POSITION: 50, + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION + | CoverEntityFeature.CLOSE + | CoverEntityFeature.OPEN, + }, ) hass.states.async_set( "cover.entity_open_attr", CoverState.OPEN, - {ATTR_CURRENT_POSITION: 100, ATTR_CURRENT_TILT_POSITION: 0}, + { + ATTR_CURRENT_POSITION: 100, + ATTR_CURRENT_TILT_POSITION: 0, + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.CLOSE_TILT + | CoverEntityFeature.OPEN_TILT + | CoverEntityFeature.SET_POSITION + | CoverEntityFeature.SET_TILT_POSITION + | CoverEntityFeature.CLOSE + | CoverEntityFeature.OPEN, + }, ) hass.states.async_set( "cover.entity_open_tilt", CoverState.OPEN, - {ATTR_CURRENT_POSITION: 50, ATTR_CURRENT_TILT_POSITION: 50}, + { + ATTR_CURRENT_POSITION: 50, + ATTR_CURRENT_TILT_POSITION: 50, + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.CLOSE_TILT + | CoverEntityFeature.OPEN_TILT + | CoverEntityFeature.SET_POSITION + | CoverEntityFeature.SET_TILT_POSITION + | CoverEntityFeature.CLOSE + | CoverEntityFeature.OPEN, + }, ) hass.states.async_set( "cover.entity_entirely_open", CoverState.OPEN, - {ATTR_CURRENT_POSITION: 100, ATTR_CURRENT_TILT_POSITION: 100}, + { + ATTR_CURRENT_POSITION: 100, + ATTR_CURRENT_TILT_POSITION: 100, + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.CLOSE_TILT + | CoverEntityFeature.OPEN_TILT + | CoverEntityFeature.SET_POSITION + | CoverEntityFeature.SET_TILT_POSITION + | CoverEntityFeature.CLOSE + | CoverEntityFeature.OPEN, + }, + ) + hass.states.async_set( + "cover.tilt_only_open", + CoverState.OPEN, + { + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.CLOSE_TILT + | CoverEntityFeature.OPEN_TILT, + }, + ) + hass.states.async_set( + "cover.tilt_only_closed", + CoverState.CLOSED, + 
{ + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.CLOSE_TILT + | CoverEntityFeature.OPEN_TILT, + }, + ) + hass.states.async_set( + "cover.tilt_only_tilt_position_100", + CoverState.OPEN, + { + ATTR_CURRENT_TILT_POSITION: 100, + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.CLOSE_TILT + | CoverEntityFeature.OPEN_TILT + | CoverEntityFeature.SET_TILT_POSITION, + }, + ) + hass.states.async_set( + "cover.tilt_only_tilt_position_0", + CoverState.CLOSED, + { + ATTR_CURRENT_TILT_POSITION: 0, + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.CLOSE_TILT + | CoverEntityFeature.OPEN_TILT + | CoverEntityFeature.SET_TILT_POSITION, + }, + ) + hass.states.async_set( + "cover.tilt_open_only_supports_tilt_position", + CoverState.OPEN, + { + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_TILT_POSITION, + }, + ) + hass.states.async_set( + "cover.tilt_partial_open_only_supports_tilt_position", + CoverState.OPEN, + { + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_TILT_POSITION, + ATTR_CURRENT_TILT_POSITION: 50, + }, + ) + hass.states.async_set( + "cover.tilt_closed_only_supports_tilt_position", + CoverState.CLOSED, + { + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_TILT_POSITION, + }, ) - close_calls = async_mock_service(hass, "cover", SERVICE_CLOSE_COVER) open_calls = async_mock_service(hass, "cover", SERVICE_OPEN_COVER) close_tilt_calls = async_mock_service(hass, "cover", SERVICE_CLOSE_COVER_TILT) @@ -70,6 +250,31 @@ async def test_reproducing_states( hass, [ State("cover.entity_close", CoverState.CLOSED), + State("cover.closed_only_supports_close_open", CoverState.CLOSED), + State("cover.closed_only_supports_tilt_close_open", CoverState.CLOSED), + State("cover.open_only_supports_close_open", CoverState.OPEN), + State("cover.open_only_supports_tilt_close_open", CoverState.OPEN), + State("cover.open_missing_all_features", CoverState.OPEN), + State( + "cover.closed_missing_all_features_has_position", + CoverState.CLOSED, + { + ATTR_CURRENT_POSITION: 0, + }, + ), + State( + "cover.open_missing_all_features_has_tilt_position", + CoverState.OPEN, + { + ATTR_CURRENT_TILT_POSITION: 50, + }, + ), + State( + "cover.closed_only_supports_position", + CoverState.CLOSED, + {ATTR_CURRENT_POSITION: 0}, + ), + State("cover.open_only_supports_position", CoverState.OPEN), State( "cover.entity_close_attr", CoverState.CLOSED, @@ -101,6 +306,39 @@ async def test_reproducing_states( CoverState.OPEN, {ATTR_CURRENT_POSITION: 100, ATTR_CURRENT_TILT_POSITION: 100}, ), + State( + "cover.tilt_only_open", + CoverState.OPEN, + {}, + ), + State( + "cover.tilt_only_tilt_position_100", + CoverState.OPEN, + {ATTR_CURRENT_TILT_POSITION: 100}, + ), + State( + "cover.tilt_only_closed", + CoverState.CLOSED, + {}, + ), + State( + "cover.tilt_only_tilt_position_0", + CoverState.CLOSED, + {ATTR_CURRENT_TILT_POSITION: 0}, + ), + State( + "cover.tilt_partial_open_only_supports_tilt_position", + CoverState.OPEN, + {ATTR_CURRENT_TILT_POSITION: 50}, + ), + State( + "cover.tilt_open_only_supports_tilt_position", + CoverState.OPEN, + ), + State( + "cover.tilt_closed_only_supports_tilt_position", + CoverState.CLOSED, + ), ], ) @@ -127,6 +365,35 @@ async def test_reproducing_states( hass, [ State("cover.entity_close", CoverState.OPEN), + State( + "cover.closed_only_supports_close_open", + CoverState.OPEN, + {ATTR_CURRENT_POSITION: 100}, + ), + State( + "cover.open_only_supports_close_open", + CoverState.CLOSED, + {ATTR_CURRENT_POSITION: 50}, + ), + State( + "cover.open_only_supports_tilt_close_open", + CoverState.CLOSED, + {ATTR_CURRENT_TILT_POSITION: 50}, + 
), + State("cover.closed_only_supports_tilt_close_open", CoverState.OPEN), + State("cover.open_missing_all_features", CoverState.CLOSED), + State( + "cover.closed_missing_all_features_has_position", + CoverState.OPEN, + {ATTR_CURRENT_POSITION: 70}, + ), + State( + "cover.open_missing_all_features_has_tilt_position", + CoverState.OPEN, + {ATTR_CURRENT_TILT_POSITION: 20}, + ), + State("cover.closed_only_supports_position", CoverState.OPEN), + State("cover.open_only_supports_position", CoverState.CLOSED), State( "cover.entity_close_attr", CoverState.OPEN, @@ -152,6 +419,39 @@ async def test_reproducing_states( ), # Should not raise State("cover.non_existing", "on"), + State( + "cover.tilt_only_open", + CoverState.CLOSED, + {}, + ), + State( + "cover.tilt_only_tilt_position_100", + CoverState.CLOSED, + {ATTR_CURRENT_TILT_POSITION: 0}, + ), + State( + "cover.tilt_only_closed", + CoverState.OPEN, + {}, + ), + State( + "cover.tilt_only_tilt_position_0", + CoverState.OPEN, + {ATTR_CURRENT_TILT_POSITION: 100}, + ), + State( + "cover.tilt_partial_open_only_supports_tilt_position", + CoverState.OPEN, + {ATTR_CURRENT_TILT_POSITION: 70}, + ), + State( + "cover.tilt_open_only_supports_tilt_position", + CoverState.CLOSED, + ), + State( + "cover.tilt_closed_only_supports_tilt_position", + CoverState.OPEN, + ), ], ) @@ -159,8 +459,10 @@ async def test_reproducing_states( {"entity_id": "cover.entity_open"}, {"entity_id": "cover.entity_open_attr"}, {"entity_id": "cover.entity_entirely_open"}, + {"entity_id": "cover.open_only_supports_close_open"}, + {"entity_id": "cover.open_missing_all_features"}, ] - assert len(close_calls) == 3 + assert len(close_calls) == len(valid_close_calls) for call in close_calls: assert call.domain == "cover" assert call.data in valid_close_calls @@ -170,8 +472,9 @@ async def test_reproducing_states( {"entity_id": "cover.entity_close"}, {"entity_id": "cover.entity_slightly_open"}, {"entity_id": "cover.entity_open_tilt"}, + {"entity_id": "cover.closed_only_supports_close_open"}, ] - assert len(open_calls) == 3 + assert len(open_calls) == len(valid_open_calls) for call in open_calls: assert call.domain == "cover" assert call.data in valid_open_calls @@ -180,27 +483,77 @@ async def test_reproducing_states( valid_close_tilt_calls = [ {"entity_id": "cover.entity_open_tilt"}, {"entity_id": "cover.entity_entirely_open"}, + {"entity_id": "cover.tilt_only_open"}, + {"entity_id": "cover.entity_open_attr"}, + {"entity_id": "cover.tilt_only_tilt_position_100"}, + {"entity_id": "cover.open_only_supports_tilt_close_open"}, ] - assert len(close_tilt_calls) == 2 + assert len(close_tilt_calls) == len(valid_close_tilt_calls) for call in close_tilt_calls: assert call.domain == "cover" assert call.data in valid_close_tilt_calls valid_close_tilt_calls.remove(call.data) - assert len(open_tilt_calls) == 1 - assert open_tilt_calls[0].domain == "cover" - assert open_tilt_calls[0].data == {"entity_id": "cover.entity_close_tilt"} + valid_open_tilt_calls = [ + {"entity_id": "cover.entity_close_tilt"}, + {"entity_id": "cover.tilt_only_closed"}, + {"entity_id": "cover.tilt_only_tilt_position_0"}, + {"entity_id": "cover.closed_only_supports_tilt_close_open"}, + ] + assert len(open_tilt_calls) == len(valid_open_tilt_calls) + for call in open_tilt_calls: + assert call.domain == "cover" + assert call.data in valid_open_tilt_calls + valid_open_tilt_calls.remove(call.data) - assert len(position_calls) == 1 - assert position_calls[0].domain == "cover" - assert position_calls[0].data == { - "entity_id": 
"cover.entity_close_attr", - ATTR_POSITION: 50, - } + valid_position_calls = [ + { + "entity_id": "cover.entity_close_attr", + ATTR_POSITION: 50, + }, + { + "entity_id": "cover.closed_missing_all_features_has_position", + ATTR_POSITION: 70, + }, + { + "entity_id": "cover.closed_only_supports_position", + ATTR_POSITION: 100, + }, + { + "entity_id": "cover.open_only_supports_position", + ATTR_POSITION: 0, + }, + ] + assert len(position_calls) == len(valid_position_calls) + for call in position_calls: + assert call.domain == "cover" + assert call.data in valid_position_calls + valid_position_calls.remove(call.data) - assert len(position_tilt_calls) == 1 - assert position_tilt_calls[0].domain == "cover" - assert position_tilt_calls[0].data == { - "entity_id": "cover.entity_close_attr", - ATTR_TILT_POSITION: 50, - } + valid_position_tilt_calls = [ + { + "entity_id": "cover.entity_close_attr", + ATTR_TILT_POSITION: 50, + }, + { + "entity_id": "cover.open_missing_all_features_has_tilt_position", + ATTR_TILT_POSITION: 20, + }, + { + "entity_id": "cover.tilt_open_only_supports_tilt_position", + ATTR_TILT_POSITION: 0, + }, + { + "entity_id": "cover.tilt_closed_only_supports_tilt_position", + ATTR_TILT_POSITION: 100, + }, + { + "entity_id": "cover.tilt_partial_open_only_supports_tilt_position", + ATTR_TILT_POSITION: 70, + }, + ] + assert len(position_tilt_calls) == len(valid_position_tilt_calls) + for call in position_tilt_calls: + assert call.domain == "cover" + assert call.data in valid_position_tilt_calls + valid_position_tilt_calls.remove(call.data) diff --git a/tests/components/dormakaba_dkey/test_config_flow.py b/tests/components/dormakaba_dkey/test_config_flow.py index 8d8140d609a..b3657810006 100644 --- a/tests/components/dormakaba_dkey/test_config_flow.py +++ b/tests/components/dormakaba_dkey/test_config_flow.py @@ -9,6 +9,7 @@ import pytest from homeassistant import config_entries from homeassistant.components.dormakaba_dkey.const import DOMAIN +from homeassistant.config_entries import SOURCE_IGNORE from homeassistant.const import CONF_ADDRESS from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResult, FlowResultType @@ -143,6 +144,43 @@ async def test_async_step_user_takes_precedence_over_discovery( assert not hass.config_entries.flow.async_progress(DOMAIN) +async def test_user_setup_removes_ignored_entry(hass: HomeAssistant) -> None: + """Test the user initiated form can replace an ignored device.""" + ignored_entry = MockConfigEntry( + domain=DOMAIN, + unique_id=DKEY_DISCOVERY_INFO.address, + source=SOURCE_IGNORE, + ) + ignored_entry.add_to_hass(hass) + assert hass.config_entries.async_entries(DOMAIN) == [ignored_entry] + + with patch( + "homeassistant.components.dormakaba_dkey.config_flow.async_discovered_service_info", + return_value=[NOT_DKEY_DISCOVERY_INFO, DKEY_DISCOVERY_INFO], + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_ADDRESS: DKEY_DISCOVERY_INFO.address, + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "associate" + assert result["errors"] is None + + await _test_common_success(hass, result) + + # Check the ignored entry is removed + assert ignored_entry not in hass.config_entries.async_entries(DOMAIN) + + async def 
test_bluetooth_step_success(hass: HomeAssistant) -> None: """Test bluetooth step success path.""" result = await hass.config_entries.flow.async_init( diff --git a/tests/components/energy/test_websocket_api.py b/tests/components/energy/test_websocket_api.py index 959ec7d1687..e4b0e568a70 100644 --- a/tests/components/energy/test_websocket_api.py +++ b/tests/components/energy/test_websocket_api.py @@ -149,7 +149,13 @@ async def test_save_preferences( "stat_energy_to": "my_battery_charging", }, ], - "device_consumption": [{"stat_consumption": "some_device_usage"}], + "device_consumption": [ + { + "stat_consumption": "some_device_usage", + "name": "My Device", + "included_in_stat": "sensor.some_other_device", + } + ], } await client.send_json({"id": 6, "type": "energy/save_prefs", **new_prefs}) diff --git a/tests/components/enphase_envoy/snapshots/test_sensor.ambr b/tests/components/enphase_envoy/snapshots/test_sensor.ambr index c1e2c9270e2..101caaf1aea 100644 --- a/tests/components/enphase_envoy/snapshots/test_sensor.ambr +++ b/tests/components/enphase_envoy/snapshots/test_sensor.ambr @@ -361,7 +361,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption', + 'original_name': 'Balanced net power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -374,7 +374,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption', + 'friendly_name': 'Envoy 1234 Balanced net power consumption', 'state_class': , 'unit_of_measurement': , }), @@ -1456,7 +1456,7 @@ 'state': '0.3', }) # --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_powerfactor_net_consumption_ct-entry] +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_power_factor_net_consumption_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -1471,7 +1471,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -1486,7 +1486,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT', + 'original_name': 'Power factor net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -1495,22 +1495,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_powerfactor_net_consumption_ct-state] +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_power_factor_net_consumption_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.21', }) # --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_powerfactor_production_ct-entry] +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_power_factor_production_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -1525,7 +1525,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 
'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -1540,7 +1540,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'powerfactor production CT', + 'original_name': 'Power factor production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -1549,15 +1549,15 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_powerfactor_production_ct-state] +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_power_factor_production_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 powerfactor production CT', + 'friendly_name': 'Envoy 1234 Power factor production CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'last_changed': , 'last_reported': , 'last_updated': , @@ -2519,7 +2519,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption', + 'original_name': 'Balanced net power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -2532,7 +2532,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption', + 'friendly_name': 'Envoy 1234 Balanced net power consumption', 'state_class': , 'unit_of_measurement': , }), @@ -5374,7 +5374,7 @@ 'state': '0.3', }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct-entry] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_net_consumption_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -5389,7 +5389,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -5404,7 +5404,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT', + 'original_name': 'Power factor net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -5413,22 +5413,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct-state] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_net_consumption_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.21', }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-entry] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -5443,7 +5443,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'entity_id': 
'sensor.envoy_1234_power_factor_net_consumption_ct_l1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -5458,7 +5458,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l1', + 'original_name': 'Power factor net consumption CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -5467,22 +5467,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-state] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l1', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l1', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.22', }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-entry] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -5497,7 +5497,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -5512,7 +5512,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l2', + 'original_name': 'Power factor net consumption CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -5521,22 +5521,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-state] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l2', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l2', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.23', }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-entry] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l3-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -5551,7 +5551,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l3', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -5566,7 +5566,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l3', + 'original_name': 'Power factor net consumption CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -5575,22 +5575,22 @@ 'unit_of_measurement': None, }) # --- -# name: 
test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-state] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l3', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l3', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l3', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.24', }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct-entry] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_production_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -5605,7 +5605,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -5620,7 +5620,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'powerfactor production CT', + 'original_name': 'Power factor production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -5629,22 +5629,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct-state] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_production_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 powerfactor production CT', + 'friendly_name': 'Envoy 1234 Power factor production CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.11', }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct_l1-entry] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_production_ct_l1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -5659,7 +5659,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -5674,7 +5674,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l1', + 'original_name': 'Power factor production CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -5683,22 +5683,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct_l1-state] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_production_ct_l1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l1', + 'friendly_name': 'Envoy 1234 Power factor production CT l1', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.12', }) # --- -# name: 
test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct_l2-entry] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_production_ct_l2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -5713,7 +5713,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -5728,7 +5728,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l2', + 'original_name': 'Power factor production CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -5737,22 +5737,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct_l2-state] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_production_ct_l2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l2', + 'friendly_name': 'Envoy 1234 Power factor production CT l2', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.13', }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct_l3-entry] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_production_ct_l3-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -5767,7 +5767,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l3', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -5782,7 +5782,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l3', + 'original_name': 'Power factor production CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -5791,15 +5791,15 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct_l3-state] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_production_ct_l3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l3', + 'friendly_name': 'Envoy 1234 Power factor production CT l3', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l3', 'last_changed': , 'last_reported': , 'last_updated': , @@ -7026,7 +7026,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption', + 'original_name': 'Balanced net power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -7039,7 +7039,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption', + 'friendly_name': 'Envoy 1234 Balanced net power consumption', 'state_class': , 'unit_of_measurement': , }), @@ -9881,7 +9881,7 @@ 'state': '0.3', }) # --- -# name: 
test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct-entry] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_net_consumption_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -9896,7 +9896,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -9911,7 +9911,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT', + 'original_name': 'Power factor net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -9920,22 +9920,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct-state] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_net_consumption_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.21', }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-entry] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -9950,7 +9950,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -9965,7 +9965,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l1', + 'original_name': 'Power factor net consumption CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -9974,22 +9974,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-state] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l1', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l1', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.22', }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-entry] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -10004,7 +10004,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -10019,7 +10019,7 @@ 
}), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l2', + 'original_name': 'Power factor net consumption CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -10028,22 +10028,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-state] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l2', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l2', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.23', }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-entry] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l3-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -10058,7 +10058,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l3', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -10073,7 +10073,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l3', + 'original_name': 'Power factor net consumption CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -10082,22 +10082,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-state] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l3', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l3', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l3', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.24', }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct-entry] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_production_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -10112,7 +10112,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -10127,7 +10127,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'powerfactor production CT', + 'original_name': 'Power factor production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -10136,22 +10136,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct-state] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_production_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 
'power_factor', - 'friendly_name': 'Envoy 1234 powerfactor production CT', + 'friendly_name': 'Envoy 1234 Power factor production CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.11', }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct_l1-entry] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_production_ct_l1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -10166,7 +10166,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -10181,7 +10181,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l1', + 'original_name': 'Power factor production CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -10190,22 +10190,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct_l1-state] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_production_ct_l1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l1', + 'friendly_name': 'Envoy 1234 Power factor production CT l1', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.12', }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct_l2-entry] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_production_ct_l2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -10220,7 +10220,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -10235,7 +10235,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l2', + 'original_name': 'Power factor production CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -10244,22 +10244,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct_l2-state] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_production_ct_l2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l2', + 'friendly_name': 'Envoy 1234 Power factor production CT l2', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.13', }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct_l3-entry] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_production_ct_l3-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -10274,7 
+10274,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l3', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -10289,7 +10289,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l3', + 'original_name': 'Power factor production CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -10298,15 +10298,15 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct_l3-state] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_production_ct_l3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l3', + 'friendly_name': 'Envoy 1234 Power factor production CT l3', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l3', 'last_changed': , 'last_reported': , 'last_updated': , @@ -11630,7 +11630,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption', + 'original_name': 'Balanced net power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -11643,7 +11643,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption', + 'friendly_name': 'Envoy 1234 Balanced net power consumption', 'state_class': , 'unit_of_measurement': , }), @@ -11688,7 +11688,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption l1', + 'original_name': 'Balanced net power consumption l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -11701,7 +11701,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption l1', + 'friendly_name': 'Envoy 1234 Balanced net power consumption l1', 'state_class': , 'unit_of_measurement': , }), @@ -11746,7 +11746,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption l2', + 'original_name': 'Balanced net power consumption l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -11759,7 +11759,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption l2', + 'friendly_name': 'Envoy 1234 Balanced net power consumption l2', 'state_class': , 'unit_of_measurement': , }), @@ -11804,7 +11804,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption l3', + 'original_name': 'Balanced net power consumption l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -11817,7 +11817,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption l3', + 'friendly_name': 'Envoy 1234 Balanced net power consumption l3', 'state_class': , 'unit_of_measurement': , }), @@ -17547,7 +17547,7 @@ 'state': '0.3', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct-entry] +# name: 
test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_net_consumption_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -17562,7 +17562,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -17577,7 +17577,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT', + 'original_name': 'Power factor net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -17586,22 +17586,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_net_consumption_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.21', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_net_consumption_ct_l1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -17616,7 +17616,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -17631,7 +17631,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l1', + 'original_name': 'Power factor net consumption CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -17640,22 +17640,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_net_consumption_ct_l1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l1', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l1', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.22', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_net_consumption_ct_l2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -17670,7 +17670,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ 
-17685,7 +17685,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l2', + 'original_name': 'Power factor net consumption CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -17694,22 +17694,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_net_consumption_ct_l2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l2', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l2', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.23', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_net_consumption_ct_l3-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -17724,7 +17724,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l3', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -17739,7 +17739,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l3', + 'original_name': 'Power factor net consumption CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -17748,22 +17748,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_net_consumption_ct_l3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l3', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l3', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l3', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.24', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_production_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -17778,7 +17778,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -17793,7 +17793,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'powerfactor production CT', + 'original_name': 'Power factor production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -17802,22 +17802,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct-state] +# name: 
test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_production_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 powerfactor production CT', + 'friendly_name': 'Envoy 1234 Power factor production CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.11', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l1-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_production_ct_l1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -17832,7 +17832,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -17847,7 +17847,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l1', + 'original_name': 'Power factor production CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -17856,22 +17856,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l1-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_production_ct_l1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l1', + 'friendly_name': 'Envoy 1234 Power factor production CT l1', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.12', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l2-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_production_ct_l2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -17886,7 +17886,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -17901,7 +17901,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l2', + 'original_name': 'Power factor production CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -17910,22 +17910,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l2-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_production_ct_l2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l2', + 'friendly_name': 'Envoy 1234 Power factor production CT l2', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.13', }) # --- -# 
name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l3-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_production_ct_l3-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -17940,7 +17940,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l3', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -17955,7 +17955,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l3', + 'original_name': 'Power factor production CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -17964,22 +17964,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l3-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_production_ct_l3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l3', + 'friendly_name': 'Envoy 1234 Power factor production CT l3', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l3', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.14', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_storage_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -17994,7 +17994,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_storage_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -18009,7 +18009,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor storage CT', + 'original_name': 'Power factor storage CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -18018,22 +18018,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_storage_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor storage CT', + 'friendly_name': 'Envoy 1234 Power factor storage CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_storage_ct', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.23', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l1-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_storage_ct_l1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -18048,7 +18048,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_storage_ct_l1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -18063,7 +18063,7 @@ }), 'original_device_class': , 'original_icon': None, - 
'original_name': 'Powerfactor storage CT l1', + 'original_name': 'Power factor storage CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -18072,22 +18072,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l1-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_storage_ct_l1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor storage CT l1', + 'friendly_name': 'Envoy 1234 Power factor storage CT l1', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_storage_ct_l1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.32', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l2-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_storage_ct_l2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -18102,7 +18102,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_storage_ct_l2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -18117,7 +18117,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor storage CT l2', + 'original_name': 'Power factor storage CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -18126,22 +18126,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l2-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_storage_ct_l2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor storage CT l2', + 'friendly_name': 'Envoy 1234 Power factor storage CT l2', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_storage_ct_l2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.23', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l3-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_storage_ct_l3-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -18156,7 +18156,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_storage_ct_l3', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -18171,7 +18171,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor storage CT l3', + 'original_name': 'Power factor storage CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -18180,15 +18180,15 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l3-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_storage_ct_l3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor storage CT l3', + 'friendly_name': 'Envoy 1234 
Power factor storage CT l3', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_storage_ct_l3', 'last_changed': , 'last_reported': , 'last_updated': , @@ -19586,7 +19586,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption', + 'original_name': 'Balanced net power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -19599,7 +19599,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption', + 'friendly_name': 'Envoy 1234 Balanced net power consumption', 'state_class': , 'unit_of_measurement': , }), @@ -19644,7 +19644,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption l1', + 'original_name': 'Balanced net power consumption l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -19657,7 +19657,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption l1', + 'friendly_name': 'Envoy 1234 Balanced net power consumption l1', 'state_class': , 'unit_of_measurement': , }), @@ -19702,7 +19702,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption l2', + 'original_name': 'Balanced net power consumption l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -19715,7 +19715,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption l2', + 'friendly_name': 'Envoy 1234 Balanced net power consumption l2', 'state_class': , 'unit_of_measurement': , }), @@ -19760,7 +19760,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption l3', + 'original_name': 'Balanced net power consumption l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -19773,7 +19773,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption l3', + 'friendly_name': 'Envoy 1234 Balanced net power consumption l3', 'state_class': , 'unit_of_measurement': , }), @@ -24065,7 +24065,7 @@ 'state': '0.3', }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct-entry] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_net_consumption_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -24080,7 +24080,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -24095,7 +24095,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT', + 'original_name': 'Power factor net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -24104,22 +24104,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct-state] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_net_consumption_ct-state] StateSnapshot({ 
'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.21', }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-entry] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_net_consumption_ct_l1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -24134,7 +24134,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -24149,7 +24149,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l1', + 'original_name': 'Power factor net consumption CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -24158,22 +24158,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-state] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_net_consumption_ct_l1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l1', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l1', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.22', }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-entry] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_net_consumption_ct_l2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -24188,7 +24188,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -24203,7 +24203,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l2', + 'original_name': 'Power factor net consumption CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -24212,22 +24212,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-state] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_net_consumption_ct_l2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l2', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l2', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l2', 'last_changed': , 'last_reported': , 'last_updated': , 
'state': '0.23', }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-entry] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_net_consumption_ct_l3-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -24242,7 +24242,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l3', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -24257,7 +24257,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l3', + 'original_name': 'Power factor net consumption CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -24266,22 +24266,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-state] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_net_consumption_ct_l3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l3', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l3', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l3', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.24', }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct-entry] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_production_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -24296,7 +24296,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -24311,7 +24311,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'powerfactor production CT', + 'original_name': 'Power factor production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -24320,22 +24320,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct-state] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_production_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 powerfactor production CT', + 'friendly_name': 'Envoy 1234 Power factor production CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.11', }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l1-entry] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_production_ct_l1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -24350,7 +24350,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l1', 
'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -24365,7 +24365,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l1', + 'original_name': 'Power factor production CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -24374,22 +24374,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l1-state] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_production_ct_l1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l1', + 'friendly_name': 'Envoy 1234 Power factor production CT l1', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.12', }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l2-entry] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_production_ct_l2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -24404,7 +24404,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -24419,7 +24419,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l2', + 'original_name': 'Power factor production CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -24428,22 +24428,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l2-state] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_production_ct_l2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l2', + 'friendly_name': 'Envoy 1234 Power factor production CT l2', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.13', }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l3-entry] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_production_ct_l3-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -24458,7 +24458,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l3', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -24473,7 +24473,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l3', + 'original_name': 'Power factor production CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -24482,15 +24482,15 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l3-state] +# name: 
test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_production_ct_l3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l3', + 'friendly_name': 'Envoy 1234 Power factor production CT l3', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l3', 'last_changed': , 'last_reported': , 'last_updated': , @@ -25326,7 +25326,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption', + 'original_name': 'Balanced net power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -25339,7 +25339,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption', + 'friendly_name': 'Envoy 1234 Balanced net power consumption', 'state_class': , 'unit_of_measurement': , }), @@ -25799,7 +25799,7 @@ 'state': 'normal', }) # --- -# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_powerfactor_production_ct-entry] +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_power_factor_production_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -25814,7 +25814,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -25829,7 +25829,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'powerfactor production CT', + 'original_name': 'Power factor production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -25838,15 +25838,15 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_powerfactor_production_ct-state] +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_power_factor_production_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 powerfactor production CT', + 'friendly_name': 'Envoy 1234 Power factor production CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'last_changed': , 'last_reported': , 'last_updated': , diff --git a/tests/components/esphome/test_assist_satellite.py b/tests/components/esphome/test_assist_satellite.py index 3281a760c39..7fc46e87503 100644 --- a/tests/components/esphome/test_assist_satellite.py +++ b/tests/components/esphome/test_assist_satellite.py @@ -25,7 +25,12 @@ from aioesphomeapi import ( ) import pytest -from homeassistant.components import assist_satellite, conversation, tts +from homeassistant.components import ( + assist_pipeline, + assist_satellite, + conversation, + tts, +) from homeassistant.components.assist_pipeline import PipelineEvent, PipelineEventType from homeassistant.components.assist_satellite import ( AssistSatelliteConfiguration, @@ -41,7 +46,6 @@ from homeassistant.components.esphome.assist_satellite import ( EsphomeAssistSatellite, VoiceAssistantUDPServer, ) -from homeassistant.components.media_source import PlayMedia from homeassistant.components.select import ( DOMAIN as SELECT_DOMAIN, SERVICE_SELECT_OPTION, @@ -57,6 +61,8 @@ from 
homeassistant.helpers.entity_component import EntityComponent from .conftest import MockESPHomeDevice +from tests.components.tts.common import MockResultStream + def get_satellite_entity( hass: HomeAssistant, mac_address: str @@ -1159,7 +1165,7 @@ async def test_announce_supported_features( Awaitable[MockESPHomeDevice], ], ) -> None: - """Test that the announce supported feature is set by flags.""" + """Test that the announce supported feature is not set by default.""" mock_device: MockESPHomeDevice = await mock_esphome_device( mock_client=mock_client, entity_info=[], @@ -1206,25 +1212,32 @@ async def test_announce_message( done = asyncio.Event() async def send_voice_assistant_announcement_await_response( - media_id: str, timeout: float, text: str + media_id: str, + timeout: float, + text: str, + start_conversation: bool, + preannounce_media_id: str | None = None, ): assert satellite.state == AssistSatelliteState.RESPONDING - assert media_id == "https://www.home-assistant.io/resolved.mp3" + assert media_id == "http://10.10.10.10:8123/api/tts_proxy/test-token" assert text == "test-text" + assert not start_conversation + assert not preannounce_media_id done.set() with ( patch( - "homeassistant.components.assist_satellite.entity.tts_generate_media_source_id", + "homeassistant.components.tts.generate_media_source_id", return_value="media-source://bla", ), patch( - "homeassistant.components.media_source.async_resolve_media", - return_value=PlayMedia( - url="https://www.home-assistant.io/resolved.mp3", - mime_type="audio/mp3", - ), + "homeassistant.components.tts.async_resolve_engine", + return_value="tts.cloud_tts", + ), + patch( + "homeassistant.components.tts.async_create_stream", + return_value=MockResultStream(hass, "wav", b""), ), patch.object( mock_client, @@ -1294,10 +1307,16 @@ async def test_announce_media_id( done = asyncio.Event() async def send_voice_assistant_announcement_await_response( - media_id: str, timeout: float, text: str + media_id: str, + timeout: float, + text: str, + start_conversation: bool, + preannounce_media_id: str | None = None, ): assert satellite.state == AssistSatelliteState.RESPONDING assert media_id == "https://www.home-assistant.io/proxied.flac" + assert not start_conversation + assert not preannounce_media_id done.set() @@ -1326,9 +1345,9 @@ async def test_announce_media_id( assert satellite.state == AssistSatelliteState.IDLE mock_async_create_proxy_url.assert_called_once_with( - hass, - dev.id, - "https://www.home-assistant.io/resolved.mp3", + hass=hass, + device_id=dev.id, + media_url="https://www.home-assistant.io/resolved.mp3", media_format="flac", rate=48000, channels=2, @@ -1336,6 +1355,417 @@ async def test_announce_media_id( ) +async def test_announce_message_with_preannounce( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test announcement with message and preannounce media id.""" + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.SPEAKER + | VoiceAssistantFeature.API_AUDIO + | VoiceAssistantFeature.ANNOUNCE + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + done = asyncio.Event() + + 
async def send_voice_assistant_announcement_await_response( + media_id: str, + timeout: float, + text: str, + start_conversation: bool, + preannounce_media_id: str | None = None, + ): + assert satellite.state == AssistSatelliteState.RESPONDING + assert media_id == "http://10.10.10.10:8123/api/tts_proxy/test-token" + assert text == "test-text" + assert not start_conversation + assert preannounce_media_id == "test-preannounce" + + done.set() + + with ( + patch( + "homeassistant.components.tts.generate_media_source_id", + return_value="media-source://bla", + ), + patch( + "homeassistant.components.tts.async_resolve_engine", + return_value="tts.cloud_tts", + ), + patch( + "homeassistant.components.tts.async_create_stream", + return_value=MockResultStream(hass, "wav", b""), + ), + patch.object( + mock_client, + "send_voice_assistant_announcement_await_response", + new=send_voice_assistant_announcement_await_response, + ), + ): + async with asyncio.timeout(1): + await hass.services.async_call( + assist_satellite.DOMAIN, + "announce", + { + "entity_id": satellite.entity_id, + "message": "test-text", + "preannounce_media_id": "test-preannounce", + }, + blocking=True, + ) + await done.wait() + assert satellite.state == AssistSatelliteState.IDLE + + +async def test_start_conversation_supported_features( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test that the start conversation supported feature is not set by default.""" + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + assert not ( + satellite.supported_features & AssistSatelliteEntityFeature.START_CONVERSATION + ) + + +async def test_start_conversation_message( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test start conversation with message.""" + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.SPEAKER + | VoiceAssistantFeature.API_AUDIO + | VoiceAssistantFeature.ANNOUNCE + | VoiceAssistantFeature.START_CONVERSATION + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + pipeline = assist_pipeline.Pipeline( + conversation_engine="test engine", + conversation_language="en", + language="en", + name="test pipeline", + stt_engine="test stt", + stt_language="en", + tts_engine="test tts", + tts_language="en", + tts_voice=None, + wake_word_entity=None, + wake_word_id=None, + ) + + done = asyncio.Event() + + async def send_voice_assistant_announcement_await_response( + media_id: str, + timeout: float, + text: str, + start_conversation: bool, + preannounce_media_id: str, + ): + assert satellite.state == AssistSatelliteState.RESPONDING + assert media_id == 
"http://10.10.10.10:8123/api/tts_proxy/test-token" + assert text == "test-text" + assert start_conversation + assert not preannounce_media_id + + done.set() + + with ( + patch( + "homeassistant.components.tts.generate_media_source_id", + return_value="media-source://bla", + ), + patch( + "homeassistant.components.tts.async_resolve_engine", + return_value="tts.cloud_tts", + ), + patch( + "homeassistant.components.tts.async_create_stream", + return_value=MockResultStream(hass, "wav", b""), + ), + patch.object( + mock_client, + "send_voice_assistant_announcement_await_response", + new=send_voice_assistant_announcement_await_response, + ), + patch( + "homeassistant.components.assist_satellite.entity.async_get_pipeline", + return_value=pipeline, + ), + ): + async with asyncio.timeout(1): + await hass.services.async_call( + assist_satellite.DOMAIN, + "start_conversation", + {"entity_id": satellite.entity_id, "start_message": "test-text"}, + blocking=True, + ) + await done.wait() + assert satellite.state == AssistSatelliteState.IDLE + + +async def test_start_conversation_media_id( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], + device_registry: dr.DeviceRegistry, +) -> None: + """Test start conversation with media id.""" + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[ + MediaPlayerInfo( + object_id="mymedia_player", + key=1, + name="my media_player", + unique_id="my_media_player", + supports_pause=True, + supported_formats=[ + MediaPlayerSupportedFormat( + format="flac", + sample_rate=48000, + num_channels=2, + purpose=MediaPlayerFormatPurpose.ANNOUNCEMENT, + sample_bytes=2, + ), + ], + ) + ], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.SPEAKER + | VoiceAssistantFeature.API_AUDIO + | VoiceAssistantFeature.ANNOUNCE + | VoiceAssistantFeature.START_CONVERSATION + }, + ) + await hass.async_block_till_done() + + dev = device_registry.async_get_device( + connections={(dr.CONNECTION_NETWORK_MAC, mock_device.entry.unique_id)} + ) + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + pipeline = assist_pipeline.Pipeline( + conversation_engine="test engine", + conversation_language="en", + language="en", + name="test pipeline", + stt_engine="test stt", + stt_language="en", + tts_engine="test tts", + tts_language="en", + tts_voice=None, + wake_word_entity=None, + wake_word_id=None, + ) + + done = asyncio.Event() + + async def send_voice_assistant_announcement_await_response( + media_id: str, + timeout: float, + text: str, + start_conversation: bool, + preannounce_media_id: str, + ): + assert satellite.state == AssistSatelliteState.RESPONDING + assert media_id == "https://www.home-assistant.io/proxied.flac" + assert start_conversation + assert not preannounce_media_id + + done.set() + + with ( + patch.object( + mock_client, + "send_voice_assistant_announcement_await_response", + new=send_voice_assistant_announcement_await_response, + ), + patch( + "homeassistant.components.esphome.assist_satellite.async_create_proxy_url", + return_value="https://www.home-assistant.io/proxied.flac", + ) as mock_async_create_proxy_url, + patch( + "homeassistant.components.assist_satellite.entity.async_get_pipeline", + return_value=pipeline, + ), + ): + async with 
asyncio.timeout(1): + await hass.services.async_call( + assist_satellite.DOMAIN, + "start_conversation", + { + "entity_id": satellite.entity_id, + "start_media_id": "https://www.home-assistant.io/resolved.mp3", + }, + blocking=True, + ) + await done.wait() + assert satellite.state == AssistSatelliteState.IDLE + + mock_async_create_proxy_url.assert_called_once_with( + hass=hass, + device_id=dev.id, + media_url="https://www.home-assistant.io/resolved.mp3", + media_format="flac", + rate=48000, + channels=2, + width=2, + ) + + +async def test_start_conversation_message_with_preannounce( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test start conversation with message and preannounce media id.""" + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.SPEAKER + | VoiceAssistantFeature.API_AUDIO + | VoiceAssistantFeature.ANNOUNCE + | VoiceAssistantFeature.START_CONVERSATION + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + pipeline = assist_pipeline.Pipeline( + conversation_engine="test engine", + conversation_language="en", + language="en", + name="test pipeline", + stt_engine="test stt", + stt_language="en", + tts_engine="test tts", + tts_language="en", + tts_voice=None, + wake_word_entity=None, + wake_word_id=None, + ) + + done = asyncio.Event() + + async def send_voice_assistant_announcement_await_response( + media_id: str, + timeout: float, + text: str, + start_conversation: bool, + preannounce_media_id: str, + ): + assert satellite.state == AssistSatelliteState.RESPONDING + assert media_id == "http://10.10.10.10:8123/api/tts_proxy/test-token" + assert text == "test-text" + assert start_conversation + assert preannounce_media_id == "test-preannounce" + + done.set() + + with ( + patch( + "homeassistant.components.tts.generate_media_source_id", + return_value="media-source://bla", + ), + patch( + "homeassistant.components.tts.async_resolve_engine", + return_value="tts.cloud_tts", + ), + patch( + "homeassistant.components.tts.async_create_stream", + return_value=MockResultStream(hass, "wav", b""), + ), + patch.object( + mock_client, + "send_voice_assistant_announcement_await_response", + new=send_voice_assistant_announcement_await_response, + ), + patch( + "homeassistant.components.assist_satellite.entity.async_get_pipeline", + return_value=pipeline, + ), + ): + async with asyncio.timeout(1): + await hass.services.async_call( + assist_satellite.DOMAIN, + "start_conversation", + { + "entity_id": satellite.entity_id, + "start_message": "test-text", + "preannounce_media_id": "test-preannounce", + }, + blocking=True, + ) + await done.wait() + assert satellite.state == AssistSatelliteState.IDLE + + async def test_satellite_unloaded_on_disconnect( hass: HomeAssistant, mock_client: APIClient, diff --git a/tests/components/evohome/fixtures/botched/user_locations.json b/tests/components/evohome/fixtures/botched/user_locations.json index f2f4091a2dc..0016c5db007 100644 --- a/tests/components/evohome/fixtures/botched/user_locations.json +++ b/tests/components/evohome/fixtures/botched/user_locations.json @@ -8,14 +8,14 @@ "country": 
"UnitedKingdom", "postcode": "E1 1AA", "locationType": "Residential", - "useDaylightSaveSwitching": true, "timeZone": { - "timeZoneId": "GMTStandardTime", - "displayName": "(UTC+00:00) Dublin, Edinburgh, Lisbon, London", - "offsetMinutes": 0, - "currentOffsetMinutes": 60, + "timeZoneId": "PacificSAStandardTime", + "displayName": "(UTC-04:00) Santiago", + "offsetMinutes": -240, + "currentOffsetMinutes": -180, "supportsDaylightSaving": true }, + "useDaylightSaveSwitching": true, "locationOwner": { "userId": "2263181", "username": "user_2263181@gmail.com", diff --git a/tests/components/evohome/snapshots/test_climate.ambr b/tests/components/evohome/snapshots/test_climate.ambr index 5a6a6bff863..7fb0ae5aaec 100644 --- a/tests/components/evohome/snapshots/test_climate.ambr +++ b/tests/components/evohome/snapshots/test_climate.ambr @@ -168,10 +168,10 @@ 'target_heat_temperature': 16.0, }), 'setpoints': dict({ - 'next_sp_from': HAFakeDatetime(2024, 7, 10, 22, 10, tzinfo=zoneinfo.ZoneInfo(key='Europe/London')), - 'next_sp_temp': 18.6, - 'this_sp_from': HAFakeDatetime(2024, 7, 10, 8, 0, tzinfo=zoneinfo.ZoneInfo(key='Europe/London')), - 'this_sp_temp': 16.0, + 'next_sp_from': HAFakeDatetime(2024, 7, 10, 8, 0, tzinfo=zoneinfo.ZoneInfo(key='America/Santiago')), + 'next_sp_temp': 16.0, + 'this_sp_from': HAFakeDatetime(2024, 7, 10, 7, 0, tzinfo=zoneinfo.ZoneInfo(key='America/Santiago')), + 'this_sp_temp': 18.1, }), 'temperature_status': dict({ 'is_available': True, @@ -215,10 +215,10 @@ 'target_heat_temperature': 17.0, }), 'setpoints': dict({ - 'next_sp_from': HAFakeDatetime(2024, 7, 10, 22, 10, tzinfo=zoneinfo.ZoneInfo(key='Europe/London')), - 'next_sp_temp': 18.6, - 'this_sp_from': HAFakeDatetime(2024, 7, 10, 8, 0, tzinfo=zoneinfo.ZoneInfo(key='Europe/London')), - 'this_sp_temp': 16.0, + 'next_sp_from': HAFakeDatetime(2024, 7, 10, 8, 0, tzinfo=zoneinfo.ZoneInfo(key='America/Santiago')), + 'next_sp_temp': 16.0, + 'this_sp_from': HAFakeDatetime(2024, 7, 10, 7, 0, tzinfo=zoneinfo.ZoneInfo(key='America/Santiago')), + 'this_sp_temp': 18.1, }), 'temperature_status': dict({ 'is_available': False, @@ -257,19 +257,19 @@ 'activeFaults': tuple( dict({ 'fault_type': 'TempZoneActuatorLowBattery', - 'since': '2022-03-02T04:50:20+00:00', + 'since': '2022-03-02T04:50:20-03:00', }), ), 'setpoint_status': dict({ 'setpoint_mode': 'TemporaryOverride', 'target_heat_temperature': 21.0, - 'until': '2022-03-07T19:00:00+00:00', + 'until': '2022-03-07T16:00:00-03:00', }), 'setpoints': dict({ - 'next_sp_from': HAFakeDatetime(2024, 7, 10, 22, 10, tzinfo=zoneinfo.ZoneInfo(key='Europe/London')), - 'next_sp_temp': 18.6, - 'this_sp_from': HAFakeDatetime(2024, 7, 10, 8, 0, tzinfo=zoneinfo.ZoneInfo(key='Europe/London')), - 'this_sp_temp': 16.0, + 'next_sp_from': HAFakeDatetime(2024, 7, 10, 8, 0, tzinfo=zoneinfo.ZoneInfo(key='America/Santiago')), + 'next_sp_temp': 16.0, + 'this_sp_from': HAFakeDatetime(2024, 7, 10, 7, 0, tzinfo=zoneinfo.ZoneInfo(key='America/Santiago')), + 'this_sp_temp': 18.1, }), 'temperature_status': dict({ 'is_available': True, @@ -313,10 +313,10 @@ 'target_heat_temperature': 17.0, }), 'setpoints': dict({ - 'next_sp_from': HAFakeDatetime(2024, 7, 10, 22, 10, tzinfo=zoneinfo.ZoneInfo(key='Europe/London')), - 'next_sp_temp': 18.6, - 'this_sp_from': HAFakeDatetime(2024, 7, 10, 8, 0, tzinfo=zoneinfo.ZoneInfo(key='Europe/London')), - 'this_sp_temp': 16.0, + 'next_sp_from': HAFakeDatetime(2024, 7, 10, 8, 0, tzinfo=zoneinfo.ZoneInfo(key='America/Santiago')), + 'next_sp_temp': 16.0, + 'this_sp_from': HAFakeDatetime(2024, 7, 
10, 7, 0, tzinfo=zoneinfo.ZoneInfo(key='America/Santiago')), + 'this_sp_temp': 18.1, }), 'temperature_status': dict({ 'is_available': True, @@ -360,10 +360,10 @@ 'target_heat_temperature': 17.0, }), 'setpoints': dict({ - 'next_sp_from': HAFakeDatetime(2024, 7, 10, 22, 10, tzinfo=zoneinfo.ZoneInfo(key='Europe/London')), - 'next_sp_temp': 18.6, - 'this_sp_from': HAFakeDatetime(2024, 7, 10, 8, 0, tzinfo=zoneinfo.ZoneInfo(key='Europe/London')), - 'this_sp_temp': 16.0, + 'next_sp_from': HAFakeDatetime(2024, 7, 10, 8, 0, tzinfo=zoneinfo.ZoneInfo(key='America/Santiago')), + 'next_sp_temp': 16.0, + 'this_sp_from': HAFakeDatetime(2024, 7, 10, 7, 0, tzinfo=zoneinfo.ZoneInfo(key='America/Santiago')), + 'this_sp_temp': 18.1, }), 'temperature_status': dict({ 'is_available': True, @@ -407,10 +407,10 @@ 'target_heat_temperature': 16.0, }), 'setpoints': dict({ - 'next_sp_from': HAFakeDatetime(2024, 7, 10, 22, 10, tzinfo=zoneinfo.ZoneInfo(key='Europe/London')), - 'next_sp_temp': 18.6, - 'this_sp_from': HAFakeDatetime(2024, 7, 10, 8, 0, tzinfo=zoneinfo.ZoneInfo(key='Europe/London')), - 'this_sp_temp': 16.0, + 'next_sp_from': HAFakeDatetime(2024, 7, 10, 8, 0, tzinfo=zoneinfo.ZoneInfo(key='America/Santiago')), + 'next_sp_temp': 16.0, + 'this_sp_from': HAFakeDatetime(2024, 7, 10, 7, 0, tzinfo=zoneinfo.ZoneInfo(key='America/Santiago')), + 'this_sp_temp': 18.1, }), 'temperature_status': dict({ 'is_available': True, @@ -450,7 +450,7 @@ 'activeFaults': tuple( dict({ 'fault_type': 'TempZoneActuatorCommunicationLost', - 'since': '2022-03-02T15:56:01+00:00', + 'since': '2022-03-02T15:56:01-03:00', }), ), 'setpoint_status': dict({ @@ -458,10 +458,10 @@ 'target_heat_temperature': 17.0, }), 'setpoints': dict({ - 'next_sp_from': HAFakeDatetime(2024, 7, 10, 22, 10, tzinfo=zoneinfo.ZoneInfo(key='Europe/London')), - 'next_sp_temp': 18.6, - 'this_sp_from': HAFakeDatetime(2024, 7, 10, 8, 0, tzinfo=zoneinfo.ZoneInfo(key='Europe/London')), - 'this_sp_temp': 16.0, + 'next_sp_from': HAFakeDatetime(2024, 7, 10, 8, 0, tzinfo=zoneinfo.ZoneInfo(key='America/Santiago')), + 'next_sp_temp': 16.0, + 'this_sp_from': HAFakeDatetime(2024, 7, 10, 7, 0, tzinfo=zoneinfo.ZoneInfo(key='America/Santiago')), + 'this_sp_temp': 18.1, }), 'temperature_status': dict({ 'is_available': True, diff --git a/tests/components/evohome/snapshots/test_water_heater.ambr b/tests/components/evohome/snapshots/test_water_heater.ambr index 7b1bc44550a..13fb375c097 100644 --- a/tests/components/evohome/snapshots/test_water_heater.ambr +++ b/tests/components/evohome/snapshots/test_water_heater.ambr @@ -2,10 +2,10 @@ # name: test_set_operation_mode[botched] list([ dict({ - 'until': HAFakeDatetime(2024, 7, 10, 12, 0, tzinfo=datetime.timezone.utc), + 'until': HAFakeDatetime(2024, 7, 10, 12, 30, tzinfo=datetime.timezone.utc), }), dict({ - 'until': HAFakeDatetime(2024, 7, 10, 12, 0, tzinfo=datetime.timezone.utc), + 'until': HAFakeDatetime(2024, 7, 10, 12, 30, tzinfo=datetime.timezone.utc), }), ]) # --- @@ -39,9 +39,9 @@ ), 'dhw_id': '3933910', 'setpoints': dict({ - 'next_sp_from': HAFakeDatetime(2024, 7, 10, 13, 0, tzinfo=zoneinfo.ZoneInfo(key='Europe/London')), + 'next_sp_from': HAFakeDatetime(2024, 7, 10, 8, 30, tzinfo=zoneinfo.ZoneInfo(key='America/Santiago')), 'next_sp_state': 'Off', - 'this_sp_from': HAFakeDatetime(2024, 7, 10, 12, 0, tzinfo=zoneinfo.ZoneInfo(key='Europe/London')), + 'this_sp_from': HAFakeDatetime(2024, 7, 10, 6, 30, tzinfo=zoneinfo.ZoneInfo(key='America/Santiago')), 'this_sp_state': 'On', }), 'state_status': dict({ diff --git 
a/tests/components/fritzbox/test_climate.py b/tests/components/fritzbox/test_climate.py index f170836fa9b..0784d7b6188 100644 --- a/tests/components/fritzbox/test_climate.py +++ b/tests/components/fritzbox/test_climate.py @@ -528,7 +528,7 @@ async def test_holidy_summer_mode( with pytest.raises( HomeAssistantError, - match="Can't change hvac mode while holiday or summer mode is active on the device", + match="Can't change HVAC mode while holiday or summer mode is active on the device", ): await hass.services.async_call( "climate", @@ -564,7 +564,7 @@ async def test_holidy_summer_mode( with pytest.raises( HomeAssistantError, - match="Can't change hvac mode while holiday or summer mode is active on the device", + match="Can't change HVAC mode while holiday or summer mode is active on the device", ): await hass.services.async_call( "climate", diff --git a/tests/components/fujitsu_fglair/test_sensor.py b/tests/components/fujitsu_fglair/test_sensor.py index e3f6109a2e8..b8200f114ad 100644 --- a/tests/components/fujitsu_fglair/test_sensor.py +++ b/tests/components/fujitsu_fglair/test_sensor.py @@ -31,3 +31,20 @@ async def test_entities( assert await integration_setup() await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_no_outside_temperature( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_ayla_api: AsyncMock, + integration_setup: Callable[[], Awaitable[bool]], +) -> None: + """Test that the outside sensor doesn't get added if the reading is None.""" + mock_ayla_api.async_get_devices.return_value[0].outdoor_temperature = None + + assert await integration_setup() + + assert ( + len(entity_registry.entities) + == len(mock_ayla_api.async_get_devices.return_value) - 1 + ) diff --git a/tests/components/fyta/fixtures/plant_status1.json b/tests/components/fyta/fixtures/plant_status1.json index 21e1fcfb0ab..91157c57c3a 100644 --- a/tests/components/fyta/fixtures/plant_status1.json +++ b/tests/components/fyta/fixtures/plant_status1.json @@ -6,10 +6,18 @@ "low_battery": false, "last_updated": "2023-01-10 10:10:00", "light": 2, + "light_min_good": "20", + "light_max_good": "450", + "light_min_acceptable": "18", + "light_max_acceptable": "675", "light_status": 3, "nickname": "Gummibaum", "nutrients_status": 3, "moisture": 61, + "moisture_min_good": "35", + "moisture_max_good": "70", + "moisture_min_acceptable": "25", + "moisture_max_acceptable": "80", "moisture_status": 3, "sensor_available": true, "sensor_id": "FD:1D:B7:E3:D0:E2", @@ -17,14 +25,24 @@ "sw_version": "1.0", "status": 1, "online": true, + "origin_path": "http://www.plant_picture.com/user_picture", "ph": null, "plant_id": 0, "plant_origin_path": "http://www.plant_picture.com/picture", "plant_thumb_path": "http://www.plant_picture.com/picture_thumb", "is_productive_plant": false, "salinity": 1, + "salinity_min_good": "0.6", + "salinity_max_good": "1", + "salinity_min_acceptable": "0.4", + "salinity_max_acceptable": "1.2", "salinity_status": 4, "scientific_name": "Ficus elastica", "temperature": 25.2, - "temperature_status": 3 + "temperature_min_good": "17", + "temperature_max_good": "36", + "temperature_min_acceptable": "10", + "temperature_max_acceptable": "42", + "temperature_status": 3, + "thumb_path": "http://www.plant_picture.com/user_picture_thumb" } diff --git a/tests/components/fyta/fixtures/plant_status1_update.json b/tests/components/fyta/fixtures/plant_status1_update.json index 98a4c6a9d91..5363c5bd290 100644 --- 
a/tests/components/fyta/fixtures/plant_status1_update.json +++ b/tests/components/fyta/fixtures/plant_status1_update.json @@ -6,10 +6,18 @@ "low_battery": false, "last_updated": "2023-01-10 10:10:00", "light": 2, + "light_min_good": "20", + "light_max_good": "450", + "light_min_acceptable": "18", + "light_max_acceptable": "675", "light_status": 3, "nickname": "Gummibaum", "nutrients_status": 3, "moisture": 61, + "moisture_min_good": "35", + "moisture_max_good": "70", + "moisture_min_acceptable": "25", + "moisture_max_acceptable": "80", "moisture_status": 3, "sensor_available": true, "sensor_id": "FD:1D:B7:E3:D0:E2", @@ -17,14 +25,24 @@ "sw_version": "1.0", "status": 1, "online": true, + "origin_path": "http://www.plant_picture.com/user_picture", "ph": null, "plant_id": 0, "plant_origin_path": "http://www.plant_picture.com/picture1", "plant_thumb_path": "http://www.plant_picture.com/picture_thumb", "is_productive_plant": false, "salinity": 1, + "salinity_min_good": "0.6", + "salinity_max_good": "1", + "salinity_min_acceptable": "0.4", + "salinity_max_acceptable": "1.2", "salinity_status": 4, "scientific_name": "Ficus elastica", "temperature": 25.2, - "temperature_status": 3 + "temperature_min_good": "17", + "temperature_max_good": "36", + "temperature_min_acceptable": "10", + "temperature_max_acceptable": "42", + "temperature_status": 3, + "thumb_path": "http://www.plant_picture.com/user_picture_thumb" } diff --git a/tests/components/fyta/fixtures/plant_status2.json b/tests/components/fyta/fixtures/plant_status2.json index bf90ab1e50d..5a181bee576 100644 --- a/tests/components/fyta/fixtures/plant_status2.json +++ b/tests/components/fyta/fixtures/plant_status2.json @@ -6,10 +6,18 @@ "low_battery": true, "last_updated": "2023-01-02 10:10:00", "light": 2, + "light_min_good": "20", + "light_max_good": "450", + "light_min_acceptable": "18", + "light_max_acceptable": "675", "light_status": 3, "nickname": "Kakaobaum", "nutrients_status": 3, "moisture": 61, + "moisture_min_good": "35", + "moisture_max_good": "70", + "moisture_min_acceptable": "25", + "moisture_max_acceptable": "80", "moisture_status": 3, "sensor_available": true, "sensor_id": "FD:1D:B7:E3:D0:E3", @@ -17,14 +25,24 @@ "sw_version": "1.0", "status": 1, "online": true, + "origin_path": "http://www.plant_picture.com/user_picture", "ph": 7, "plant_id": 0, "plant_origin_path": "", "plant_thumb_path": "", "is_productive_plant": false, "salinity": 1, + "salinity_min_good": "0.6", + "salinity_max_good": "1", + "salinity_min_acceptable": "0.4", + "salinity_max_acceptable": "1.2", "salinity_status": 4, "scientific_name": "Theobroma cacao", "temperature": 25.2, - "temperature_status": 3 + "temperature_min_good": "17", + "temperature_max_good": "36", + "temperature_min_acceptable": "10", + "temperature_max_acceptable": "42", + "temperature_status": 3, + "thumb_path": "http://www.plant_picture.com/user_picture_thumb" } diff --git a/tests/components/fyta/fixtures/plant_status3.json b/tests/components/fyta/fixtures/plant_status3.json index 4bb4e0b81a7..ad34e01065e 100644 --- a/tests/components/fyta/fixtures/plant_status3.json +++ b/tests/components/fyta/fixtures/plant_status3.json @@ -6,10 +6,18 @@ "low_battery": true, "last_updated": "2023-01-02 10:10:00", "light": 2, + "light_min_good": "20", + "light_max_good": "450", + "light_min_acceptable": "18", + "light_max_acceptable": "675", "light_status": 3, "nickname": "Tomatenpflanze", "nutrients_status": 0, "moisture": 61, + "moisture_min_good": "35", + "moisture_max_good": "70", + 
"moisture_min_acceptable": "25", + "moisture_max_acceptable": "80", "moisture_status": 3, "sensor_available": true, "sensor_id": "FD:1D:B7:E3:D0:E3", @@ -17,14 +25,24 @@ "sw_version": "1.0", "status": 1, "online": true, + "origin_path": "http://www.plant_picture.com/user_picture", "ph": 7, "plant_id": 0, "plant_origin_path": "http://www.plant_picture.com/picture", "plant_thumb_path": "http://www.plant_picture.com/picture_thumb", "is_productive_plant": true, "salinity": 1, + "salinity_min_good": "0.6", + "salinity_max_good": "1", + "salinity_min_acceptable": "0.4", + "salinity_max_acceptable": "1.2", "salinity_status": 4, "scientific_name": "Solanum lycopersicum", "temperature": 25.2, - "temperature_status": 3 + "temperature_min_good": "17", + "temperature_max_good": "36", + "temperature_min_acceptable": "10", + "temperature_max_acceptable": "42", + "temperature_status": 3, + "thumb_path": "http://www.plant_picture.com/user_picture_thumb" } diff --git a/tests/components/fyta/snapshots/test_diagnostics.ambr b/tests/components/fyta/snapshots/test_diagnostics.ambr index 24206fbb875..7bc6a6f7b5a 100644 --- a/tests/components/fyta/snapshots/test_diagnostics.ambr +++ b/tests/components/fyta/snapshots/test_diagnostics.ambr @@ -32,9 +32,17 @@ 'fertilise_next': None, 'last_updated': '2023-01-10T10:10:00', 'light': 2.0, + 'light_max_acceptable': 675.0, + 'light_max_good': 450.0, + 'light_min_acceptable': 18.0, + 'light_min_good': 20.0, 'light_status': 3, 'low_battery': False, 'moisture': 61.0, + 'moisture_max_acceptable': 80.0, + 'moisture_max_good': 70.0, + 'moisture_min_acceptable': 25.0, + 'moisture_min_good': 35.0, 'moisture_status': 3, 'name': 'Gummibaum', 'notification_light': False, @@ -50,6 +58,10 @@ 'productive_plant': False, 'repotted': True, 'salinity': 1.0, + 'salinity_max_acceptable': 1.2, + 'salinity_max_good': 1.0, + 'salinity_min_acceptable': 0.4, + 'salinity_min_good': 0.6, 'salinity_status': 4, 'scientific_name': 'Ficus elastica', 'sensor_available': True, @@ -59,7 +71,13 @@ 'status': 1, 'sw_version': '1.0', 'temperature': 25.2, + 'temperature_max_acceptable': 42.0, + 'temperature_max_good': 36.0, + 'temperature_min_acceptable': 10.0, + 'temperature_min_good': 17.0, 'temperature_status': 3, + 'user_picture_path': 'http://www.plant_picture.com/user_picture', + 'user_thumb_path': 'http://www.plant_picture.com/user_picture_thumb', }), '1': dict({ 'battery_level': 80.0, @@ -67,9 +85,17 @@ 'fertilise_next': None, 'last_updated': '2023-01-02T10:10:00', 'light': 2.0, + 'light_max_acceptable': 675.0, + 'light_max_good': 450.0, + 'light_min_acceptable': 18.0, + 'light_min_good': 20.0, 'light_status': 3, 'low_battery': True, 'moisture': 61.0, + 'moisture_max_acceptable': 80.0, + 'moisture_max_good': 70.0, + 'moisture_min_acceptable': 25.0, + 'moisture_min_good': 35.0, 'moisture_status': 3, 'name': 'Kakaobaum', 'notification_light': False, @@ -85,6 +111,10 @@ 'productive_plant': False, 'repotted': True, 'salinity': 1.0, + 'salinity_max_acceptable': 1.2, + 'salinity_max_good': 1.0, + 'salinity_min_acceptable': 0.4, + 'salinity_min_good': 0.6, 'salinity_status': 4, 'scientific_name': 'Theobroma cacao', 'sensor_available': True, @@ -94,7 +124,13 @@ 'status': 1, 'sw_version': '1.0', 'temperature': 25.2, + 'temperature_max_acceptable': 42.0, + 'temperature_max_good': 36.0, + 'temperature_min_acceptable': 10.0, + 'temperature_min_good': 17.0, 'temperature_status': 3, + 'user_picture_path': 'http://www.plant_picture.com/user_picture', + 'user_thumb_path': 
'http://www.plant_picture.com/user_picture_thumb', }), }), }) diff --git a/tests/components/gios/test_config_flow.py b/tests/components/gios/test_config_flow.py index d81758b0de0..3764c52a810 100644 --- a/tests/components/gios/test_config_flow.py +++ b/tests/components/gios/test_config_flow.py @@ -6,7 +6,8 @@ from unittest.mock import patch from gios import ApiError from homeassistant.components.gios import config_flow -from homeassistant.components.gios.const import CONF_STATION_ID +from homeassistant.components.gios.const import CONF_STATION_ID, DOMAIN +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -17,36 +18,35 @@ from tests.common import load_fixture CONFIG = { CONF_NAME: "Foo", - CONF_STATION_ID: 123, + CONF_STATION_ID: "123", } async def test_show_form(hass: HomeAssistant) -> None: """Test that the form is served with no input.""" - flow = config_flow.GiosFlowHandler() - flow.hass = hass - - result = await flow.async_step_user(user_input=None) + with patch( + "homeassistant.components.gios.coordinator.Gios._get_stations", + return_value=STATIONS, + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" -async def test_invalid_station_id(hass: HomeAssistant) -> None: - """Test that errors are shown when measuring station ID is invalid.""" +async def test_form_with_api_error(hass: HomeAssistant) -> None: + """Test the form is aborted because of API error.""" with patch( "homeassistant.components.gios.coordinator.Gios._get_stations", - return_value=STATIONS, + side_effect=ApiError("error"), ): - flow = config_flow.GiosFlowHandler() - flow.hass = hass - flow.context = {} - - result = await flow.async_step_user( - user_input={CONF_NAME: "Foo", CONF_STATION_ID: 0} + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} ) - assert result["errors"] == {CONF_STATION_ID: "wrong_station_id"} + assert result["type"] is FlowResultType.ABORT async def test_invalid_sensor_data(hass: HomeAssistant) -> None: @@ -76,17 +76,25 @@ async def test_invalid_sensor_data(hass: HomeAssistant) -> None: async def test_cannot_connect(hass: HomeAssistant) -> None: """Test that errors are shown when cannot connect to GIOS server.""" - with patch( - "homeassistant.components.gios.coordinator.Gios._async_get", - side_effect=ApiError("error"), + with ( + patch( + "homeassistant.components.gios.coordinator.Gios._get_stations", + return_value=STATIONS, + ), + patch( + "homeassistant.components.gios.coordinator.Gios._async_get", + side_effect=ApiError("error"), + ), ): - flow = config_flow.GiosFlowHandler() - flow.hass = hass - flow.context = {} + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], CONFIG + ) + await hass.async_block_till_done() - result = await flow.async_step_user(user_input=CONFIG) - - assert result["errors"] == {"base": "cannot_connect"} + assert result["errors"] == {"base": "cannot_connect"} async def test_create_entry(hass: HomeAssistant) -> None: diff --git a/tests/components/google/test_calendar.py b/tests/components/google/test_calendar.py index 3d10e753714..720c0176850 100644 --- a/tests/components/google/test_calendar.py +++ 
b/tests/components/google/test_calendar.py @@ -1451,6 +1451,14 @@ async def test_working_location_ignored( assert state.attributes.get("message") == expected_event_message +@pytest.mark.parametrize( + ("event_type", "expected_event_message"), + [ + ("workingLocation", "Test All Day Event"), + ("birthday", None), + ("default", None), + ], +) @pytest.mark.parametrize("calendar_is_primary", [True]) async def test_working_location_entity( hass: HomeAssistant, @@ -1458,12 +1466,14 @@ async def test_working_location_entity( entity_registry: er.EntityRegistry, mock_events_list_items: Callable[[list[dict[str, Any]]], None], component_setup: ComponentSetup, + event_type: str, + expected_event_message: str | None, ) -> None: """Test that working location events are registered under a disabled by default entity.""" event = { **TEST_EVENT, **upcoming(), - "eventType": "workingLocation", + "eventType": event_type, } mock_events_list_items([event]) assert await component_setup() @@ -1484,7 +1494,7 @@ async def test_working_location_entity( state = hass.states.get("calendar.working_location") assert state assert state.name == "Working location" - assert state.attributes.get("message") == "Test All Day Event" + assert state.attributes.get("message") == expected_event_message @pytest.mark.parametrize("calendar_is_primary", [False]) @@ -1506,3 +1516,49 @@ async def test_no_working_location_entity( entity_entry = entity_registry.async_get("calendar.working_location") assert not entity_entry + + +@pytest.mark.parametrize( + ("event_type", "expected_event_message"), + [ + ("workingLocation", None), + ("birthday", "Test All Day Event"), + ("default", None), + ], +) +@pytest.mark.parametrize("calendar_is_primary", [True]) +async def test_birthday_entity( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + entity_registry: er.EntityRegistry, + mock_events_list_items: Callable[[list[dict[str, Any]]], None], + component_setup: ComponentSetup, + event_type: str, + expected_event_message: str | None, +) -> None: + """Test that birthday events appear only on the birthdays calendar.""" + event = { + **TEST_EVENT, + **upcoming(), + "eventType": event_type, + } + mock_events_list_items([event]) + assert await component_setup() + + entity_entry = entity_registry.async_get("calendar.birthdays") + assert entity_entry + assert entity_entry.disabled_by is None # Enabled by default + + entity_registry.async_update_entity( + entity_id="calendar.birthdays", disabled_by=None + ) + async_fire_time_changed( + hass, + dt_util.utcnow() + datetime.timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1), + ) + await hass.async_block_till_done() + + state = hass.states.get("calendar.birthdays") + assert state + assert state.name == "Birthdays" + assert state.attributes.get("message") == expected_event_message diff --git a/tests/components/google_assistant/snapshots/test_diagnostics.ambr b/tests/components/google_assistant/snapshots/test_diagnostics.ambr index 1ecedbd1173..cc5ccbb1de1 100644 --- a/tests/components/google_assistant/snapshots/test_diagnostics.ambr +++ b/tests/components/google_assistant/snapshots/test_diagnostics.ambr @@ -98,6 +98,7 @@ 'humidifier', 'input_boolean', 'input_select', + 'lawn_mower', 'light', 'lock', 'media_player', diff --git a/tests/components/google_assistant/test_google_assistant.py b/tests/components/google_assistant/test_google_assistant.py index 2b0bfd82908..035a8d151c4 100644 --- a/tests/components/google_assistant/test_google_assistant.py +++ 
b/tests/components/google_assistant/test_google_assistant.py @@ -16,13 +16,9 @@ from homeassistant.components import ( light, media_player, ) -from homeassistant.const import ( - CLOUD_NEVER_EXPOSED_ENTITIES, - EntityCategory, - Platform, - UnitOfTemperature, -) +from homeassistant.const import CLOUD_NEVER_EXPOSED_ENTITIES, EntityCategory, Platform from homeassistant.helpers import entity_registry as er +from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM from . import DEMO_DEVICES @@ -275,7 +271,7 @@ async def test_query_climate_request_f( ) -> None: """Test a query request.""" # Mock demo devices as fahrenheit to see if we convert to celsius - hass_fixture.config.units.temperature_unit = UnitOfTemperature.FAHRENHEIT + hass_fixture.config.units = US_CUSTOMARY_SYSTEM for entity_id in ("climate.hvac", "climate.heatpump", "climate.ecobee"): state = hass_fixture.states.get(entity_id) attr = dict(state.attributes) @@ -332,7 +328,6 @@ async def test_query_climate_request_f( "thermostatHumidityAmbient": 54.2, "currentFanSpeedSetting": "on_high", } - hass_fixture.config.units.temperature_unit = UnitOfTemperature.CELSIUS async def test_query_humidifier_request( diff --git a/tests/components/google_assistant/test_trait.py b/tests/components/google_assistant/test_trait.py index dafe85d97b2..cf9c8047049 100644 --- a/tests/components/google_assistant/test_trait.py +++ b/tests/components/google_assistant/test_trait.py @@ -21,6 +21,7 @@ from homeassistant.components import ( input_boolean, input_button, input_select, + lawn_mower, light, lock, media_player, @@ -44,6 +45,7 @@ from homeassistant.components.fan import FanEntityFeature from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import HumidifierEntityFeature +from homeassistant.components.lawn_mower import LawnMowerEntityFeature from homeassistant.components.light import LightEntityFeature from homeassistant.components.lock import LockEntityFeature from homeassistant.components.media_player import ( @@ -79,6 +81,11 @@ from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, St from homeassistant.core_config import async_process_ha_core_config from homeassistant.util import dt as dt_util from homeassistant.util.unit_conversion import TemperatureConverter +from homeassistant.util.unit_system import ( + METRIC_SYSTEM, + US_CUSTOMARY_SYSTEM, + UnitSystem, +) from . 
import BASIC_CONFIG, MockConfig @@ -584,6 +591,64 @@ async def test_startstop_vacuum(hass: HomeAssistant) -> None: assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} +async def test_dock_lawn_mower(hass: HomeAssistant) -> None: + """Test dock trait support for lawn mower domain.""" + assert helpers.get_google_type(lawn_mower.DOMAIN, None) is not None + assert trait.DockTrait.supported(lawn_mower.DOMAIN, 0, None, None) + + trt = trait.DockTrait( + hass, State("lawn_mower.bla", lawn_mower.LawnMowerActivity.MOWING), BASIC_CONFIG + ) + + assert trt.sync_attributes() == {} + + assert trt.query_attributes() == {"isDocked": False} + + calls = async_mock_service(hass, lawn_mower.DOMAIN, lawn_mower.SERVICE_DOCK) + await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) + assert len(calls) == 1 + assert calls[0].data == {ATTR_ENTITY_ID: "lawn_mower.bla"} + + +async def test_startstop_lawn_mower(hass: HomeAssistant) -> None: + """Test startStop trait support for lawn mower domain.""" + assert helpers.get_google_type(lawn_mower.DOMAIN, None) is not None + assert trait.StartStopTrait.supported(lawn_mower.DOMAIN, 0, None, None) + + trt = trait.StartStopTrait( + hass, + State( + "lawn_mower.bla", + lawn_mower.LawnMowerActivity.PAUSED, + {ATTR_SUPPORTED_FEATURES: LawnMowerEntityFeature.PAUSE}, + ), + BASIC_CONFIG, + ) + + assert trt.sync_attributes() == {"pausable": True} + + assert trt.query_attributes() == {"isRunning": False, "isPaused": True} + + start_calls = async_mock_service( + hass, lawn_mower.DOMAIN, lawn_mower.SERVICE_START_MOWING + ) + await trt.execute(trait.COMMAND_START_STOP, BASIC_DATA, {"start": True}, {}) + assert len(start_calls) == 1 + assert start_calls[0].data == {ATTR_ENTITY_ID: "lawn_mower.bla"} + + pause_calls = async_mock_service(hass, lawn_mower.DOMAIN, lawn_mower.SERVICE_PAUSE) + await trt.execute(trait.COMMAND_PAUSE_UNPAUSE, BASIC_DATA, {"pause": True}, {}) + assert len(pause_calls) == 1 + assert pause_calls[0].data == {ATTR_ENTITY_ID: "lawn_mower.bla"} + + unpause_calls = async_mock_service( + hass, lawn_mower.DOMAIN, lawn_mower.SERVICE_START_MOWING + ) + await trt.execute(trait.COMMAND_PAUSE_UNPAUSE, BASIC_DATA, {"pause": False}, {}) + assert len(unpause_calls) == 1 + assert unpause_calls[0].data == {ATTR_ENTITY_ID: "lawn_mower.bla"} + + @pytest.mark.parametrize( ( "domain", @@ -1072,7 +1137,7 @@ async def test_temperature_setting_climate_onoff(hass: HomeAssistant) -> None: assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None, None) - hass.config.units.temperature_unit = UnitOfTemperature.FAHRENHEIT + hass.config.units = US_CUSTOMARY_SYSTEM trt = trait.TemperatureSettingTrait( hass, @@ -1123,8 +1188,6 @@ async def test_temperature_setting_climate_no_modes(hass: HomeAssistant) -> None assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None, None) - hass.config.units.temperature_unit = UnitOfTemperature.CELSIUS - trt = trait.TemperatureSettingTrait( hass, State( @@ -1153,7 +1216,7 @@ async def test_temperature_setting_climate_range(hass: HomeAssistant) -> None: assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None, None) - hass.config.units.temperature_unit = UnitOfTemperature.FAHRENHEIT + hass.config.units = US_CUSTOMARY_SYSTEM trt = trait.TemperatureSettingTrait( hass, @@ -1261,7 +1324,6 @@ async def 
test_temperature_setting_climate_range(hass: HomeAssistant) -> None: ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TEMPERATURE: 75, } - hass.config.units.temperature_unit = UnitOfTemperature.CELSIUS async def test_temperature_setting_climate_setpoint(hass: HomeAssistant) -> None: @@ -1269,8 +1331,6 @@ async def test_temperature_setting_climate_setpoint(hass: HomeAssistant) -> None assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None, None) - hass.config.units.temperature_unit = UnitOfTemperature.CELSIUS - trt = trait.TemperatureSettingTrait( hass, State( @@ -1356,8 +1416,6 @@ async def test_temperature_setting_climate_setpoint_auto(hass: HomeAssistant) -> Setpoint in auto mode. """ - hass.config.units.temperature_unit = UnitOfTemperature.CELSIUS - trt = trait.TemperatureSettingTrait( hass, State( @@ -1407,8 +1465,6 @@ async def test_temperature_setting_climate_setpoint_auto(hass: HomeAssistant) -> async def test_temperature_control(hass: HomeAssistant) -> None: """Test TemperatureControl trait support for sensor domain.""" - hass.config.units.temperature_unit = UnitOfTemperature.CELSIUS - trt = trait.TemperatureControlTrait( hass, State("sensor.temp", 18), @@ -1431,13 +1487,13 @@ async def test_temperature_control(hass: HomeAssistant) -> None: @pytest.mark.parametrize( ("unit_in", "unit_out", "temp_in", "temp_out", "current_in", "current_out"), [ - (UnitOfTemperature.CELSIUS, "C", "120", 120, "130", 130), - (UnitOfTemperature.FAHRENHEIT, "F", "248", 120, "266", 130), + (METRIC_SYSTEM, "C", "120", 120, "130", 130), + (US_CUSTOMARY_SYSTEM, "F", "248", 120, "266", 130), ], ) async def test_temperature_control_water_heater( hass: HomeAssistant, - unit_in: UnitOfTemperature, + unit_in: UnitSystem, unit_out: str, temp_in: str, temp_out: float, @@ -1445,17 +1501,17 @@ async def test_temperature_control_water_heater( current_out: float, ) -> None: """Test TemperatureControl trait support for water heater domain.""" - hass.config.units.temperature_unit = unit_in + hass.config.units = unit_in min_temp = TemperatureConverter.convert( water_heater.DEFAULT_MIN_TEMP, UnitOfTemperature.CELSIUS, - unit_in, + unit_in.temperature_unit, ) max_temp = TemperatureConverter.convert( water_heater.DEFAULT_MAX_TEMP, UnitOfTemperature.CELSIUS, - unit_in, + unit_in.temperature_unit, ) trt = trait.TemperatureControlTrait( @@ -1489,30 +1545,30 @@ async def test_temperature_control_water_heater( @pytest.mark.parametrize( ("unit", "temp_init", "temp_in", "temp_out", "current_init"), [ - (UnitOfTemperature.CELSIUS, "180", 220, 220, "180"), - (UnitOfTemperature.FAHRENHEIT, "356", 220, 428, "356"), + (METRIC_SYSTEM, "180", 220, 220, "180"), + (US_CUSTOMARY_SYSTEM, "356", 220, 428, "356"), ], ) async def test_temperature_control_water_heater_set_temperature( hass: HomeAssistant, - unit: UnitOfTemperature, + unit: UnitSystem, temp_init: str, temp_in: float, temp_out: float, current_init: str, ) -> None: """Test TemperatureControl trait support for water heater domain - SetTemperature.""" - hass.config.units.temperature_unit = unit + hass.config.units = unit min_temp = TemperatureConverter.convert( 40, UnitOfTemperature.CELSIUS, - unit, + unit.temperature_unit, ) max_temp = TemperatureConverter.convert( 230, UnitOfTemperature.CELSIUS, - unit, + unit.temperature_unit, ) trt = trait.TemperatureControlTrait( @@ -3633,17 +3689,17 @@ async def test_temperature_control_sensor(hass: HomeAssistant) -> None: @pytest.mark.parametrize( ("unit_in", 
"unit_out", "state", "ambient"), [ - (UnitOfTemperature.FAHRENHEIT, "F", "70", 21.1), - (UnitOfTemperature.CELSIUS, "C", "21.1", 21.1), - (UnitOfTemperature.FAHRENHEIT, "F", "unavailable", None), - (UnitOfTemperature.FAHRENHEIT, "F", "unknown", None), + (US_CUSTOMARY_SYSTEM, "F", "70", 21.1), + (METRIC_SYSTEM, "C", "21.1", 21.1), + (US_CUSTOMARY_SYSTEM, "F", "unavailable", None), + (US_CUSTOMARY_SYSTEM, "F", "unknown", None), ], ) async def test_temperature_control_sensor_data( - hass: HomeAssistant, unit_in, unit_out, state, ambient + hass: HomeAssistant, unit_in: UnitSystem, unit_out, state, ambient ) -> None: """Test TemperatureControl trait support for temperature sensor.""" - hass.config.units.temperature_unit = unit_in + hass.config.units = unit_in trt = trait.TemperatureControlTrait( hass, @@ -3668,7 +3724,6 @@ async def test_temperature_control_sensor_data( } else: assert trt.query_attributes() == {} - hass.config.units.temperature_unit = UnitOfTemperature.CELSIUS async def test_humidity_setting_sensor(hass: HomeAssistant) -> None: diff --git a/tests/components/google_generative_ai_conversation/__init__.py b/tests/components/google_generative_ai_conversation/__init__.py index 6e2d37b035b..fbf9ee545db 100644 --- a/tests/components/google_generative_ai_conversation/__init__.py +++ b/tests/components/google_generative_ai_conversation/__init__.py @@ -3,12 +3,12 @@ from unittest.mock import Mock from google.genai.errors import ClientError -import requests +import httpx CLIENT_ERROR_500 = ClientError( 500, Mock( - __class__=requests.Response, + __class__=httpx.Response, json=Mock( return_value={ "message": "Internal Server Error", @@ -20,7 +20,7 @@ CLIENT_ERROR_500 = ClientError( CLIENT_ERROR_API_KEY_INVALID = ClientError( 400, Mock( - __class__=requests.Response, + __class__=httpx.Response, json=Mock( return_value={ "message": "'reason': API_KEY_INVALID", diff --git a/tests/components/google_generative_ai_conversation/conftest.py b/tests/components/google_generative_ai_conversation/conftest.py index 2bc81b10ce4..6ec147da2ab 100644 --- a/tests/components/google_generative_ai_conversation/conftest.py +++ b/tests/components/google_generative_ai_conversation/conftest.py @@ -4,6 +4,9 @@ from unittest.mock import Mock, patch import pytest +from homeassistant.components.google_generative_ai_conversation.conversation import ( + CONF_USE_GOOGLE_SEARCH_TOOL, +) from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_LLM_HASS_API from homeassistant.core import HomeAssistant @@ -41,6 +44,23 @@ async def mock_config_entry_with_assist( return mock_config_entry +@pytest.fixture +async def mock_config_entry_with_google_search( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> MockConfigEntry: + """Mock a config entry with assist.""" + with patch("google.genai.models.AsyncModels.get"): + hass.config_entries.async_update_entry( + mock_config_entry, + options={ + CONF_LLM_HASS_API: llm.LLM_API_ASSIST, + CONF_USE_GOOGLE_SEARCH_TOOL: True, + }, + ) + await hass.async_block_till_done() + return mock_config_entry + + @pytest.fixture async def mock_init_component( hass: HomeAssistant, mock_config_entry: ConfigEntry diff --git a/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr b/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr index c840f7da324..ec98bdd6529 100644 --- a/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr +++ 
b/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr @@ -6,7 +6,7 @@ tuple( ), dict({ - 'config': GenerateContentConfig(http_options=None, system_instruction="Current time is 05:00:00. Today's date is 2024-05-24.\nYou are a voice assistant for Home Assistant.\nAnswer questions about the world truthfully.\nAnswer in plain text. Keep it simple and to the point.\nOnly if the user wants to control a device, tell them to expose entities to their voice assistant in Home Assistant.", temperature=1.0, top_p=0.95, top_k=64.0, candidate_count=None, max_output_tokens=150, stop_sequences=None, response_logprobs=None, logprobs=None, presence_penalty=None, frequency_penalty=None, seed=None, response_mime_type=None, response_schema=None, routing_config=None, safety_settings=[SafetySetting(method=None, category=, threshold=), SafetySetting(method=None, category=, threshold=), SafetySetting(method=None, category=, threshold=), SafetySetting(method=None, category=, threshold=)], tools=[Tool(function_declarations=[FunctionDeclaration(response=None, description='Test function', name='test_tool', parameters=Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=, description=None, enum=None, format=None, items=None, properties={'param1': Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=, description='Test parameters', enum=None, format=None, items=Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=, description=None, enum=None, format=None, items=None, properties=None, required=None), properties=None, required=None), 'param2': Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=None, description=None, enum=None, format=None, items=None, properties=None, required=None), 'param3': Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=, description=None, enum=None, format=None, items=None, properties={'json': Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=, description=None, enum=None, format=None, items=None, properties=None, required=None)}, required=[])}, required=[]))], retrieval=None, google_search=None, google_search_retrieval=None, code_execution=None)], tool_config=None, labels=None, cached_content=None, response_modalities=None, media_resolution=None, speech_config=None, audio_timestamp=None, automatic_function_calling=AutomaticFunctionCallingConfig(disable=True, 
maximum_remote_calls=None, ignore_call_history=None), thinking_config=None), + 'config': GenerateContentConfig(http_options=None, system_instruction="Current time is 05:00:00. Today's date is 2024-05-24.\nYou are a voice assistant for Home Assistant.\nAnswer questions about the world truthfully.\nAnswer in plain text. Keep it simple and to the point.\nOnly if the user wants to control a device, tell them to expose entities to their voice assistant in Home Assistant.", temperature=1.0, top_p=0.95, top_k=64.0, candidate_count=None, max_output_tokens=150, stop_sequences=None, response_logprobs=None, logprobs=None, presence_penalty=None, frequency_penalty=None, seed=None, response_mime_type=None, response_schema=None, routing_config=None, safety_settings=[SafetySetting(method=None, category=, threshold=), SafetySetting(method=None, category=, threshold=), SafetySetting(method=None, category=, threshold=), SafetySetting(method=None, category=, threshold=)], tools=[Tool(function_declarations=[FunctionDeclaration(response=None, description='Test function', name='test_tool', parameters=Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties={'param1': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description='Test parameters', enum=None, format=None, items=Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=), max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=), 'param2': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=None), 'param3': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties={'json': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=)}, property_ordering=None, required=[], type=)}, property_ordering=None, required=[], type=))], retrieval=None, google_search=None, google_search_retrieval=None, code_execution=None)], tool_config=None, labels=None, cached_content=None, response_modalities=None, media_resolution=None, speech_config=None, audio_timestamp=None, automatic_function_calling=AutomaticFunctionCallingConfig(disable=True, maximum_remote_calls=None, ignore_call_history=None), 
thinking_config=None), 'history': list([ ]), 'model': 'models/gemini-2.0-flash', @@ -25,7 +25,9 @@ tuple( ), dict({ - 'message': Content(parts=[Part(video_metadata=None, thought=None, code_execution_result=None, executable_code=None, file_data=None, function_call=None, function_response=FunctionResponse(id=None, name='test_tool', response={'result': 'Test response'}), inline_data=None, text=None)], role=None), + 'message': list([ + Part(video_metadata=None, thought=None, code_execution_result=None, executable_code=None, file_data=None, function_call=None, function_response=FunctionResponse(id=None, name='test_tool', response={'result': 'Test response'}), inline_data=None, text=None), + ]), }), ), ]) @@ -56,7 +58,42 @@ tuple( ), dict({ - 'message': Content(parts=[Part(video_metadata=None, thought=None, code_execution_result=None, executable_code=None, file_data=None, function_call=None, function_response=FunctionResponse(id=None, name='test_tool', response={'result': 'Test response'}), inline_data=None, text=None)], role=None), + 'message': list([ + Part(video_metadata=None, thought=None, code_execution_result=None, executable_code=None, file_data=None, function_call=None, function_response=FunctionResponse(id=None, name='test_tool', response={'result': 'Test response'}), inline_data=None, text=None), + ]), + }), + ), + ]) +# --- +# name: test_use_google_search + list([ + tuple( + '', + tuple( + ), + dict({ + 'config': GenerateContentConfig(http_options=None, system_instruction="Current time is 05:00:00. Today's date is 2024-05-24.\nYou are a voice assistant for Home Assistant.\nAnswer questions about the world truthfully.\nAnswer in plain text. Keep it simple and to the point.\nOnly if the user wants to control a device, tell them to expose entities to their voice assistant in Home Assistant.", temperature=1.0, top_p=0.95, top_k=64.0, candidate_count=None, max_output_tokens=150, stop_sequences=None, response_logprobs=None, logprobs=None, presence_penalty=None, frequency_penalty=None, seed=None, response_mime_type=None, response_schema=None, routing_config=None, safety_settings=[SafetySetting(method=None, category=, threshold=), SafetySetting(method=None, category=, threshold=), SafetySetting(method=None, category=, threshold=), SafetySetting(method=None, category=, threshold=)], tools=[Tool(function_declarations=[FunctionDeclaration(response=None, description='Test function', name='test_tool', parameters=Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties={'param1': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description='Test parameters', enum=None, format=None, items=Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=), max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=), 'param2': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, 
min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=None), 'param3': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties={'json': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=)}, property_ordering=None, required=[], type=)}, property_ordering=None, required=[], type=))], retrieval=None, google_search=None, google_search_retrieval=None, code_execution=None), Tool(function_declarations=None, retrieval=None, google_search=GoogleSearch(), google_search_retrieval=None, code_execution=None)], tool_config=None, labels=None, cached_content=None, response_modalities=None, media_resolution=None, speech_config=None, audio_timestamp=None, automatic_function_calling=AutomaticFunctionCallingConfig(disable=True, maximum_remote_calls=None, ignore_call_history=None), thinking_config=None), + 'history': list([ + ]), + 'model': 'models/gemini-2.0-flash', + }), + ), + tuple( + '().send_message', + tuple( + ), + dict({ + 'message': 'Please call the test function', + }), + ), + tuple( + '().send_message', + tuple( + ), + dict({ + 'message': list([ + Part(video_metadata=None, thought=None, code_execution_result=None, executable_code=None, file_data=None, function_call=None, function_response=FunctionResponse(id=None, name='test_tool', response={'result': 'Test response'}), inline_data=None, text=None), + ]), }), ), ]) diff --git a/tests/components/google_generative_ai_conversation/test_config_flow.py b/tests/components/google_generative_ai_conversation/test_config_flow.py index 30c9d6c46e6..f7635c0b45e 100644 --- a/tests/components/google_generative_ai_conversation/test_config_flow.py +++ b/tests/components/google_generative_ai_conversation/test_config_flow.py @@ -21,12 +21,14 @@ from homeassistant.components.google_generative_ai_conversation.const import ( CONF_TEMPERATURE, CONF_TOP_K, CONF_TOP_P, + CONF_USE_GOOGLE_SEARCH_TOOL, DOMAIN, RECOMMENDED_CHAT_MODEL, RECOMMENDED_HARM_BLOCK_THRESHOLD, RECOMMENDED_MAX_TOKENS, RECOMMENDED_TOP_K, RECOMMENDED_TOP_P, + RECOMMENDED_USE_GOOGLE_SEARCH_TOOL, ) from homeassistant.const import CONF_LLM_HASS_API from homeassistant.core import HomeAssistant @@ -143,6 +145,7 @@ async def test_form(hass: HomeAssistant) -> None: CONF_HATE_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD, CONF_SEXUAL_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD, CONF_DANGEROUS_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD, + CONF_USE_GOOGLE_SEARCH_TOOL: RECOMMENDED_USE_GOOGLE_SEARCH_TOOL, }, ), ( diff --git a/tests/components/google_generative_ai_conversation/test_conversation.py b/tests/components/google_generative_ai_conversation/test_conversation.py index 64f71c18bf2..a2b238b9399 100644 --- a/tests/components/google_generative_ai_conversation/test_conversation.py +++ b/tests/components/google_generative_ai_conversation/test_conversation.py @@ -10,7 +10,7 @@ from syrupy.assertion 
import SnapshotAssertion import voluptuous as vol from homeassistant.components import conversation -from homeassistant.components.conversation import trace +from homeassistant.components.conversation import UserContent, async_get_chat_log, trace from homeassistant.components.google_generative_ai_conversation.conversation import ( _escape_decode, _format_schema, @@ -18,7 +18,7 @@ from homeassistant.components.google_generative_ai_conversation.conversation imp from homeassistant.const import CONF_LLM_HASS_API from homeassistant.core import Context, HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import intent, llm +from homeassistant.helpers import chat_session, intent, llm from . import CLIENT_ERROR_500 @@ -104,28 +104,24 @@ async def test_function_call( assert result.response.response_type == intent.IntentResponseType.ACTION_DONE assert result.response.as_dict()["speech"]["plain"]["speech"] == "Hi there!" - mock_tool_call = mock_create.mock_calls[2][2]["message"] - assert mock_tool_call.model_dump() == { - "parts": [ - { - "code_execution_result": None, - "executable_code": None, - "file_data": None, - "function_call": None, - "function_response": { - "id": None, - "name": "test_tool", - "response": { - "result": "Test response", - }, - }, - "inline_data": None, - "text": None, - "thought": None, - "video_metadata": None, + mock_tool_response_parts = mock_create.mock_calls[2][2]["message"] + assert len(mock_tool_response_parts) == 1 + assert mock_tool_response_parts[0].model_dump() == { + "code_execution_result": None, + "executable_code": None, + "file_data": None, + "function_call": None, + "function_response": { + "id": None, + "name": "test_tool", + "response": { + "result": "Test response", }, - ], - "role": None, + }, + "inline_data": None, + "text": None, + "thought": None, + "video_metadata": None, } mock_tool.async_call.assert_awaited_once_with( @@ -156,8 +152,10 @@ async def test_function_call( trace_events = last_trace.get("events", []) assert [event["event_type"] for event in trace_events] == [ trace.ConversationTraceEventType.ASYNC_PROCESS, - trace.ConversationTraceEventType.AGENT_DETAIL, + trace.ConversationTraceEventType.AGENT_DETAIL, # prompt and tools + trace.ConversationTraceEventType.AGENT_DETAIL, # stats for response trace.ConversationTraceEventType.TOOL_CALL, + trace.ConversationTraceEventType.AGENT_DETAIL, # stats for response ] # AGENT_DETAIL event contains the raw prompt passed to the model detail_event = trace_events[1] @@ -166,6 +164,79 @@ async def test_function_call( p["tool_name"] for p in detail_event["data"]["messages"][2]["tool_calls"] ] == ["test_tool"] + detail_event = trace_events[2] + assert set(detail_event["data"]["stats"].keys()) == { + "input_tokens", + "cached_input_tokens", + "output_tokens", + } + + +@patch( + "homeassistant.components.google_generative_ai_conversation.conversation.llm.AssistAPI._async_get_tools" +) +@pytest.mark.usefixtures("mock_init_component") +@pytest.mark.usefixtures("mock_ulid_tools") +async def test_use_google_search( + mock_get_tools, + hass: HomeAssistant, + mock_config_entry_with_google_search: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test function calling.""" + agent_id = "conversation.google_generative_ai_conversation" + context = Context() + + mock_tool = AsyncMock() + mock_tool.name = "test_tool" + mock_tool.description = "Test function" + mock_tool.parameters = vol.Schema( + { + vol.Optional("param1", description="Test parameters"): [ + 
vol.All(str, vol.Lower) + ], + vol.Optional("param2"): vol.Any(float, int), + vol.Optional("param3"): dict, + } + ) + + mock_get_tools.return_value = [mock_tool] + + with patch("google.genai.chats.AsyncChats.create") as mock_create: + mock_chat = AsyncMock() + mock_create.return_value.send_message = mock_chat + chat_response = Mock(prompt_feedback=None) + mock_chat.return_value = chat_response + mock_part = Mock() + mock_part.text = "" + mock_part.function_call = FunctionCall( + name="test_tool", + args={ + "param1": ["test_value", "param1\\'s value"], + "param2": 2.7, + }, + ) + + def tool_call( + hass: HomeAssistant, tool_input: llm.ToolInput, tool_context: llm.LLMContext + ) -> dict[str, Any]: + mock_part.function_call = None + mock_part.text = "Hi there!" + return {"result": "Test response"} + + mock_tool.async_call.side_effect = tool_call + chat_response.candidates = [Mock(content=Mock(parts=[mock_part]))] + await conversation.async_converse( + hass, + "Please call the test function", + None, + context, + agent_id=agent_id, + device_id="test_device", + ) + + assert [tuple(mock_call) for mock_call in mock_create.mock_calls] == snapshot + @patch( "homeassistant.components.google_generative_ai_conversation.conversation.llm.AssistAPI._async_get_tools" @@ -217,28 +288,24 @@ async def test_function_call_without_parameters( assert result.response.response_type == intent.IntentResponseType.ACTION_DONE assert result.response.as_dict()["speech"]["plain"]["speech"] == "Hi there!" - mock_tool_call = mock_create.mock_calls[2][2]["message"] - assert mock_tool_call.model_dump() == { - "parts": [ - { - "code_execution_result": None, - "executable_code": None, - "file_data": None, - "function_call": None, - "function_response": { - "id": None, - "name": "test_tool", - "response": { - "result": "Test response", - }, - }, - "inline_data": None, - "text": None, - "thought": None, - "video_metadata": None, + mock_tool_response_parts = mock_create.mock_calls[2][2]["message"] + assert len(mock_tool_response_parts) == 1 + assert mock_tool_response_parts[0].model_dump() == { + "code_execution_result": None, + "executable_code": None, + "file_data": None, + "function_call": None, + "function_response": { + "id": None, + "name": "test_tool", + "response": { + "result": "Test response", }, - ], - "role": None, + }, + "inline_data": None, + "text": None, + "thought": None, + "video_metadata": None, } mock_tool.async_call.assert_awaited_once_with( @@ -315,29 +382,25 @@ async def test_function_exception( assert result.response.response_type == intent.IntentResponseType.ACTION_DONE assert result.response.as_dict()["speech"]["plain"]["speech"] == "Hi there!" 
- mock_tool_call = mock_create.mock_calls[2][2]["message"] - assert mock_tool_call.model_dump() == { - "parts": [ - { - "code_execution_result": None, - "executable_code": None, - "file_data": None, - "function_call": None, - "function_response": { - "id": None, - "name": "test_tool", - "response": { - "error": "HomeAssistantError", - "error_text": "Test tool exception", - }, - }, - "inline_data": None, - "text": None, - "thought": None, - "video_metadata": None, + mock_tool_response_parts = mock_create.mock_calls[2][2]["message"] + assert len(mock_tool_response_parts) == 1 + assert mock_tool_response_parts[0].model_dump() == { + "code_execution_result": None, + "executable_code": None, + "file_data": None, + "function_call": None, + "function_response": { + "id": None, + "name": "test_tool", + "response": { + "error": "HomeAssistantError", + "error_text": "Test tool exception", }, - ], - "role": None, + }, + "inline_data": None, + "text": None, + "thought": None, + "video_metadata": None, } mock_tool.async_call.assert_awaited_once_with( hass, @@ -618,3 +681,37 @@ async def test_escape_decode() -> None: async def test_format_schema(openapi, genai_schema) -> None: """Test _format_schema.""" assert _format_schema(openapi) == genai_schema + + +@pytest.mark.usefixtures("mock_init_component") +async def test_empty_content_in_chat_history( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Tests that in case of an empty entry in the chat history the google API will receive an injected space sign instead.""" + with ( + patch("google.genai.chats.AsyncChats.create") as mock_create, + chat_session.async_get_chat_session(hass) as session, + async_get_chat_log(hass, session) as chat_log, + ): + mock_chat = AsyncMock() + mock_create.return_value.send_message = mock_chat + + # Chat preparation with two inputs, one being an empty string + first_input = "First request" + second_input = "" + chat_log.async_add_user_content(UserContent(first_input)) + chat_log.async_add_user_content(UserContent(second_input)) + + await conversation.async_converse( + hass, + "Second request", + session.conversation_id, + Context(), + agent_id="conversation.google_generative_ai_conversation", + ) + + _, kwargs = mock_create.call_args + actual_history = kwargs.get("history") + + assert actual_history[0].parts[0].text == first_input + assert actual_history[1].parts[0].text == " " diff --git a/tests/components/google_generative_ai_conversation/test_init.py b/tests/components/google_generative_ai_conversation/test_init.py index 25533ffd46e..a08acc0df3f 100644 --- a/tests/components/google_generative_ai_conversation/test_init.py +++ b/tests/components/google_generative_ai_conversation/test_init.py @@ -1,6 +1,6 @@ """Tests for the Google Generative AI Conversation integration.""" -from unittest.mock import AsyncMock, Mock, patch +from unittest.mock import AsyncMock, Mock, mock_open, patch import pytest from requests.exceptions import Timeout @@ -71,6 +71,8 @@ async def test_generate_content_service_with_image( ), patch("pathlib.Path.exists", return_value=True), patch.object(hass.config, "is_allowed_path", return_value=True), + patch("builtins.open", mock_open(read_data="this is an image")), + patch("mimetypes.guess_type", return_value=["image/jpeg"]), ): response = await hass.services.async_call( "google_generative_ai_conversation", diff --git a/tests/components/gree/test_climate.py b/tests/components/gree/test_climate.py index d7c011a4c25..e6bfc43252f 100644 --- a/tests/components/gree/test_climate.py +++ 
b/tests/components/gree/test_climate.py @@ -67,6 +67,11 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import entity_registry as er +from homeassistant.util.unit_system import ( + METRIC_SYSTEM, + US_CUSTOMARY_SYSTEM, + UnitSystem, +) from .common import async_setup_gree, build_device_mock @@ -411,19 +416,19 @@ async def test_send_power_off_device_timeout( @pytest.mark.parametrize( ("units", "temperature"), - [(UnitOfTemperature.CELSIUS, 26), (UnitOfTemperature.FAHRENHEIT, 73)], + [(METRIC_SYSTEM, 26), (US_CUSTOMARY_SYSTEM, 73)], ) async def test_send_target_temperature( - hass: HomeAssistant, discovery, device, units, temperature + hass: HomeAssistant, discovery, device, units: UnitSystem, temperature ) -> None: """Test for sending target temperature command to the device.""" - hass.config.units.temperature_unit = units + hass.config.units = units device().power = True device().mode = HVAC_MODES_REVERSE.get(HVACMode.AUTO) fake_device = device() - if units == UnitOfTemperature.FAHRENHEIT: + if units.temperature_unit == UnitOfTemperature.FAHRENHEIT: fake_device.temperature_units = 1 await async_setup_gree(hass) @@ -435,7 +440,7 @@ async def test_send_target_temperature( ENTITY_ID, "off", { - ATTR_UNIT_OF_MEASUREMENT: units, + ATTR_UNIT_OF_MEASUREMENT: units.temperature_unit, }, ) @@ -451,10 +456,6 @@ async def test_send_target_temperature( assert state.attributes.get(ATTR_TEMPERATURE) == temperature assert state.state == HVAC_MODES.get(fake_device.mode) - # Reset config temperature_unit back to CELSIUS, required for - # additional tests outside this component. - hass.config.units.temperature_unit = UnitOfTemperature.CELSIUS - @pytest.mark.parametrize( ("temperature", "hvac_mode"), @@ -493,17 +494,17 @@ async def test_send_target_temperature_with_hvac_mode( @pytest.mark.parametrize( ("units", "temperature"), [ - (UnitOfTemperature.CELSIUS, 25), - (UnitOfTemperature.FAHRENHEIT, 73), - (UnitOfTemperature.FAHRENHEIT, 74), + (METRIC_SYSTEM, 25), + (US_CUSTOMARY_SYSTEM, 73), + (US_CUSTOMARY_SYSTEM, 74), ], ) async def test_send_target_temperature_device_timeout( - hass: HomeAssistant, discovery, device, units, temperature + hass: HomeAssistant, discovery, device, units: UnitSystem, temperature ) -> None: """Test for sending target temperature command to the device with a device timeout.""" - hass.config.units.temperature_unit = units - if units == UnitOfTemperature.FAHRENHEIT: + hass.config.units = units + if units.temperature_unit == UnitOfTemperature.FAHRENHEIT: device().temperature_units = 1 device().push_state_update.side_effect = DeviceTimeoutError @@ -520,24 +521,21 @@ async def test_send_target_temperature_device_timeout( assert state is not None assert state.attributes.get(ATTR_TEMPERATURE) == temperature - # Reset config temperature_unit back to CELSIUS, required for additional tests outside this component. 
- hass.config.units.temperature_unit = UnitOfTemperature.CELSIUS - @pytest.mark.parametrize( ("units", "temperature"), [ - (UnitOfTemperature.CELSIUS, 25), - (UnitOfTemperature.FAHRENHEIT, 73), - (UnitOfTemperature.FAHRENHEIT, 74), + (METRIC_SYSTEM, 25), + (US_CUSTOMARY_SYSTEM, 73), + (US_CUSTOMARY_SYSTEM, 74), ], ) async def test_update_target_temperature( - hass: HomeAssistant, discovery, device, units, temperature + hass: HomeAssistant, discovery, device, units: UnitSystem, temperature ) -> None: """Test for updating target temperature from the device.""" - hass.config.units.temperature_unit = units - if units == UnitOfTemperature.FAHRENHEIT: + hass.config.units = units + if units.temperature_unit == UnitOfTemperature.FAHRENHEIT: device().temperature_units = 1 device().target_temperature = temperature @@ -554,9 +552,6 @@ async def test_update_target_temperature( assert state is not None assert state.attributes.get(ATTR_TEMPERATURE) == temperature - # Reset config temperature_unit back to CELSIUS, required for additional tests outside this component. - hass.config.units.temperature_unit = UnitOfTemperature.CELSIUS - @pytest.mark.parametrize( "preset", [PRESET_AWAY, PRESET_ECO, PRESET_SLEEP, PRESET_BOOST, PRESET_NONE] diff --git a/tests/components/habitica/fixtures/tasks.json b/tests/components/habitica/fixtures/tasks.json index 3dff57bdd51..085508b4432 100644 --- a/tests/components/habitica/fixtures/tasks.json +++ b/tests/components/habitica/fixtures/tasks.json @@ -605,7 +605,18 @@ "startDate": "2024-09-20T23:00:00.000Z", "daysOfMonth": [], "weeksOfMonth": [3], - "checklist": [], + "checklist": [ + { + "completed": false, + "id": "a2a6702d-58e1-46c2-a3ce-422d525cc0b6", + "text": "Checklist-item1" + }, + { + "completed": true, + "id": "9f64e1cd-b0ab-4577-8344-c7a5e1827997", + "text": "Checklist-item2" + } + ], "reminders": [], "createdAt": "2024-10-10T15:57:14.304Z", "updatedAt": "2024-11-27T23:47:29.986Z", diff --git a/tests/components/habitica/snapshots/test_services.ambr b/tests/components/habitica/snapshots/test_services.ambr index af0ec76f3a4..430cd379c0d 100644 --- a/tests/components/habitica/snapshots/test_services.ambr +++ b/tests/components/habitica/snapshots/test_services.ambr @@ -1116,6 +1116,16 @@ 'winner': None, }), 'checklist': list([ + dict({ + 'completed': False, + 'id': 'a2a6702d-58e1-46c2-a3ce-422d525cc0b6', + 'text': 'Checklist-item1', + }), + dict({ + 'completed': True, + 'id': '9f64e1cd-b0ab-4577-8344-c7a5e1827997', + 'text': 'Checklist-item2', + }), ]), 'collapseChecklist': False, 'completed': False, @@ -3378,6 +3388,16 @@ 'winner': None, }), 'checklist': list([ + dict({ + 'completed': False, + 'id': 'a2a6702d-58e1-46c2-a3ce-422d525cc0b6', + 'text': 'Checklist-item1', + }), + dict({ + 'completed': True, + 'id': '9f64e1cd-b0ab-4577-8344-c7a5e1827997', + 'text': 'Checklist-item2', + }), ]), 'collapseChecklist': False, 'completed': False, @@ -4511,6 +4531,16 @@ 'winner': None, }), 'checklist': list([ + dict({ + 'completed': False, + 'id': 'a2a6702d-58e1-46c2-a3ce-422d525cc0b6', + 'text': 'Checklist-item1', + }), + dict({ + 'completed': True, + 'id': '9f64e1cd-b0ab-4577-8344-c7a5e1827997', + 'text': 'Checklist-item2', + }), ]), 'collapseChecklist': False, 'completed': False, @@ -5092,6 +5122,16 @@ 'winner': None, }), 'checklist': list([ + dict({ + 'completed': False, + 'id': 'a2a6702d-58e1-46c2-a3ce-422d525cc0b6', + 'text': 'Checklist-item1', + }), + dict({ + 'completed': True, + 'id': '9f64e1cd-b0ab-4577-8344-c7a5e1827997', + 'text': 'Checklist-item2', + }), ]), 
'collapseChecklist': False, 'completed': False, diff --git a/tests/components/habitica/test_services.py b/tests/components/habitica/test_services.py index 238cb8412ba..774593fa0f6 100644 --- a/tests/components/habitica/test_services.py +++ b/tests/components/habitica/test_services.py @@ -1,18 +1,20 @@ """Test Habitica actions.""" from collections.abc import Generator -from datetime import datetime +from datetime import UTC, datetime from typing import Any from unittest.mock import AsyncMock, patch from uuid import UUID from aiohttp import ClientError +from freezegun.api import freeze_time from habiticalib import ( Checklist, Direction, Frequency, HabiticaTaskResponse, Reminders, + Repeat, Skill, Task, TaskPriority, @@ -32,6 +34,7 @@ from homeassistant.components.habitica.const import ( ATTR_COUNTER_UP, ATTR_DIRECTION, ATTR_FREQUENCY, + ATTR_INTERVAL, ATTR_ITEM, ATTR_KEYWORD, ATTR_NOTES, @@ -40,8 +43,12 @@ from homeassistant.components.habitica.const import ( ATTR_REMOVE_CHECKLIST_ITEM, ATTR_REMOVE_REMINDER, ATTR_REMOVE_TAG, + ATTR_REPEAT, + ATTR_REPEAT_MONTHLY, ATTR_SCORE_CHECKLIST_ITEM, ATTR_SKILL, + ATTR_START_DATE, + ATTR_STREAK, ATTR_TAG, ATTR_TARGET, ATTR_TASK, @@ -53,6 +60,7 @@ from homeassistant.components.habitica.const import ( SERVICE_ACCEPT_QUEST, SERVICE_CANCEL_QUEST, SERVICE_CAST_SKILL, + SERVICE_CREATE_DAILY, SERVICE_CREATE_HABIT, SERVICE_CREATE_REWARD, SERVICE_CREATE_TODO, @@ -63,6 +71,7 @@ from homeassistant.components.habitica.const import ( SERVICE_SCORE_REWARD, SERVICE_START_QUEST, SERVICE_TRANSFORMATION, + SERVICE_UPDATE_DAILY, SERVICE_UPDATE_HABIT, SERVICE_UPDATE_REWARD, SERVICE_UPDATE_TODO, @@ -952,6 +961,7 @@ async def test_get_tasks( (SERVICE_UPDATE_REWARD, "5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b"), (SERVICE_UPDATE_HABIT, "f21fa608-cfc6-4413-9fc7-0eb1b48ca43a"), (SERVICE_UPDATE_TODO, "88de7cd9-af2b-49ce-9afd-bf941d87336b"), + (SERVICE_UPDATE_DAILY, "6e53f1f5-a315-4edd-984d-8d762e4a08ef"), ], ) @pytest.mark.usefixtures("habitica") @@ -1003,7 +1013,12 @@ async def test_update_task_exceptions( ) @pytest.mark.parametrize( "service", - [SERVICE_CREATE_REWARD, SERVICE_CREATE_HABIT, SERVICE_CREATE_TODO], + [ + SERVICE_CREATE_DAILY, + SERVICE_CREATE_HABIT, + SERVICE_CREATE_REWARD, + SERVICE_CREATE_TODO, + ], ) @pytest.mark.usefixtures("habitica") async def test_create_task_exceptions( @@ -1606,6 +1621,446 @@ async def test_create_todo( habitica.create_task.assert_awaited_with(call_args) +@pytest.mark.parametrize( + ("service_data", "call_args"), + [ + ( + { + ATTR_RENAME: "RENAME", + }, + Task(text="RENAME"), + ), + ( + { + ATTR_NOTES: "NOTES", + }, + Task(notes="NOTES"), + ), + ( + { + ATTR_ADD_CHECKLIST_ITEM: "Checklist-item", + }, + Task( + { + "checklist": [ + Checklist( + id=UUID("a2a6702d-58e1-46c2-a3ce-422d525cc0b6"), + text="Checklist-item1", + completed=False, + ), + Checklist( + id=UUID("9f64e1cd-b0ab-4577-8344-c7a5e1827997"), + text="Checklist-item2", + completed=True, + ), + Checklist( + id=UUID("12345678-1234-5678-1234-567812345678"), + text="Checklist-item", + completed=False, + ), + ] + } + ), + ), + ( + { + ATTR_REMOVE_CHECKLIST_ITEM: "Checklist-item1", + }, + Task( + { + "checklist": [ + Checklist( + id=UUID("9f64e1cd-b0ab-4577-8344-c7a5e1827997"), + text="Checklist-item2", + completed=True, + ), + ] + } + ), + ), + ( + { + ATTR_SCORE_CHECKLIST_ITEM: "Checklist-item1", + }, + Task( + { + "checklist": [ + Checklist( + id=UUID("a2a6702d-58e1-46c2-a3ce-422d525cc0b6"), + text="Checklist-item1", + completed=True, + ), + Checklist( + 
id=UUID("9f64e1cd-b0ab-4577-8344-c7a5e1827997"), + text="Checklist-item2", + completed=True, + ), + ] + } + ), + ), + ( + { + ATTR_UNSCORE_CHECKLIST_ITEM: "Checklist-item2", + }, + Task( + { + "checklist": [ + Checklist( + id=UUID("a2a6702d-58e1-46c2-a3ce-422d525cc0b6"), + text="Checklist-item1", + completed=False, + ), + Checklist( + id=UUID("9f64e1cd-b0ab-4577-8344-c7a5e1827997"), + text="Checklist-item2", + completed=False, + ), + ] + } + ), + ), + ( + { + ATTR_PRIORITY: "trivial", + }, + Task(priority=TaskPriority.TRIVIAL), + ), + ( + { + ATTR_START_DATE: "2025-03-05", + }, + Task(startDate=datetime(2025, 3, 5)), + ), + ( + { + ATTR_FREQUENCY: "weekly", + }, + Task(frequency=Frequency.WEEKLY), + ), + ( + { + ATTR_INTERVAL: 5, + }, + Task(everyX=5), + ), + ( + { + ATTR_FREQUENCY: "weekly", + ATTR_REPEAT: ["m", "t", "w", "th"], + }, + Task( + frequency=Frequency.WEEKLY, + repeat=Repeat(m=True, t=True, w=True, th=True), + ), + ), + ( + { + ATTR_FREQUENCY: "monthly", + ATTR_REPEAT_MONTHLY: "day_of_month", + }, + Task(frequency=Frequency.MONTHLY, daysOfMonth=[20], weeksOfMonth=[]), + ), + ( + { + ATTR_FREQUENCY: "monthly", + ATTR_REPEAT_MONTHLY: "day_of_week", + }, + Task( + frequency=Frequency.MONTHLY, + daysOfMonth=[], + weeksOfMonth=[2], + repeat=Repeat( + m=False, t=False, w=False, th=False, f=True, s=False, su=False + ), + ), + ), + ( + { + ATTR_REMINDER: ["10:00"], + }, + Task( + { + "reminders": [ + Reminders( + id=UUID("12345678-1234-5678-1234-567812345678"), + time=datetime(2025, 2, 25, 10, 0, tzinfo=UTC), + startDate=None, + ) + ] + } + ), + ), + ( + { + ATTR_REMOVE_REMINDER: ["10:00"], + }, + Task({"reminders": []}), + ), + ( + { + ATTR_CLEAR_REMINDER: True, + }, + Task({"reminders": []}), + ), + ( + { + ATTR_STREAK: 10, + }, + Task(streak=10), + ), + ( + { + ATTR_ALIAS: "ALIAS", + }, + Task(alias="ALIAS"), + ), + ], +) +@pytest.mark.usefixtures("mock_uuid4") +@freeze_time("2025-02-25T22:00:00.000Z") +async def test_update_daily( + hass: HomeAssistant, + config_entry: MockConfigEntry, + habitica: AsyncMock, + service_data: dict[str, Any], + call_args: Task, +) -> None: + """Test Habitica update daily action.""" + task_id = "6e53f1f5-a315-4edd-984d-8d762e4a08ef" + + await hass.services.async_call( + DOMAIN, + SERVICE_UPDATE_DAILY, + service_data={ + ATTR_CONFIG_ENTRY: config_entry.entry_id, + ATTR_TASK: task_id, + **service_data, + }, + return_response=True, + blocking=True, + ) + habitica.update_task.assert_awaited_with(UUID(task_id), call_args) + + +@pytest.mark.parametrize( + ("service_data", "call_args"), + [ + ( + { + ATTR_NAME: "TITLE", + }, + Task(type=TaskType.DAILY, text="TITLE"), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_NOTES: "NOTES", + }, + Task(type=TaskType.DAILY, text="TITLE", notes="NOTES"), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_ADD_CHECKLIST_ITEM: "Checklist-item", + }, + Task( + type=TaskType.DAILY, + text="TITLE", + checklist=[ + Checklist( + id=UUID("12345678-1234-5678-1234-567812345678"), + text="Checklist-item", + completed=False, + ), + ], + ), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_PRIORITY: "trivial", + }, + Task(type=TaskType.DAILY, text="TITLE", priority=TaskPriority.TRIVIAL), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_START_DATE: "2025-03-05", + }, + Task(type=TaskType.DAILY, text="TITLE", startDate=datetime(2025, 3, 5)), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_FREQUENCY: "weekly", + }, + Task(type=TaskType.DAILY, text="TITLE", frequency=Frequency.WEEKLY), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_INTERVAL: 5, + }, + Task(type=TaskType.DAILY, 
text="TITLE", everyX=5), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_FREQUENCY: "weekly", + ATTR_REPEAT: ["m", "t", "w", "th"], + }, + Task( + type=TaskType.DAILY, + text="TITLE", + frequency=Frequency.WEEKLY, + repeat=Repeat(m=True, t=True, w=True, th=True), + ), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_FREQUENCY: "monthly", + ATTR_REPEAT_MONTHLY: "day_of_month", + }, + Task( + type=TaskType.DAILY, + text="TITLE", + frequency=Frequency.MONTHLY, + daysOfMonth=[25], + weeksOfMonth=[], + ), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_FREQUENCY: "monthly", + ATTR_REPEAT_MONTHLY: "day_of_week", + }, + Task( + type=TaskType.DAILY, + text="TITLE", + frequency=Frequency.MONTHLY, + daysOfMonth=[], + weeksOfMonth=[3], + repeat=Repeat( + m=False, t=True, w=False, th=False, f=False, s=False, su=False + ), + ), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_REMINDER: ["10:00"], + }, + Task( + type=TaskType.DAILY, + text="TITLE", + reminders=[ + Reminders( + id=UUID("12345678-1234-5678-1234-567812345678"), + time=datetime(2025, 2, 25, 10, 0, tzinfo=UTC), + startDate=None, + ) + ], + ), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_REMOVE_REMINDER: ["10:00"], + }, + Task(type=TaskType.DAILY, text="TITLE", reminders=[]), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_CLEAR_REMINDER: True, + }, + Task(type=TaskType.DAILY, text="TITLE", reminders=[]), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_STREAK: 10, + }, + Task(type=TaskType.DAILY, text="TITLE", streak=10), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_ALIAS: "ALIAS", + }, + Task(type=TaskType.DAILY, text="TITLE", alias="ALIAS"), + ), + ], +) +@pytest.mark.usefixtures("mock_uuid4") +@freeze_time("2025-02-25T22:00:00.000Z") +async def test_create_daily( + hass: HomeAssistant, + config_entry: MockConfigEntry, + habitica: AsyncMock, + service_data: dict[str, Any], + call_args: Task, +) -> None: + """Test Habitica create daily action.""" + + await hass.services.async_call( + DOMAIN, + SERVICE_CREATE_DAILY, + service_data={ + ATTR_CONFIG_ENTRY: config_entry.entry_id, + **service_data, + }, + return_response=True, + blocking=True, + ) + habitica.create_task.assert_awaited_with(call_args) + + +@pytest.mark.parametrize( + "service_data", + [ + { + ATTR_FREQUENCY: "daily", + ATTR_REPEAT: ["m", "t", "w", "th"], + }, + { + ATTR_FREQUENCY: "weekly", + ATTR_REPEAT_MONTHLY: "day_of_month", + }, + { + ATTR_FREQUENCY: "weekly", + ATTR_REPEAT_MONTHLY: "day_of_week", + }, + ], +) +@pytest.mark.usefixtures("mock_uuid4") +@freeze_time("2025-02-25T22:00:00.000Z") +async def test_update_daily_service_validation_errors( + hass: HomeAssistant, + config_entry: MockConfigEntry, + habitica: AsyncMock, + service_data: dict[str, Any], +) -> None: + """Test Habitica update daily action.""" + task_id = "6e53f1f5-a315-4edd-984d-8d762e4a08ef" + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + DOMAIN, + SERVICE_UPDATE_DAILY, + service_data={ + ATTR_CONFIG_ENTRY: config_entry.entry_id, + ATTR_TASK: task_id, + **service_data, + }, + return_response=True, + blocking=True, + ) + + async def test_tags( hass: HomeAssistant, config_entry: MockConfigEntry, diff --git a/tests/components/harmony/test_subscriber.py b/tests/components/harmony/test_subscriber.py index f1d1866a044..22957fc3f69 100644 --- a/tests/components/harmony/test_subscriber.py +++ b/tests/components/harmony/test_subscriber.py @@ -38,7 +38,7 @@ async def test_empty_callbacks(hass: HomeAssistant) -> None: """Ensure we handle a missing callback in a subscription.""" subscriber = HarmonySubscriberMixin(hass) - 
callbacks = {k: None for k in _ALL_CALLBACK_NAMES} + callbacks = dict.fromkeys(_ALL_CALLBACK_NAMES) subscriber.async_subscribe(HarmonyCallback(**callbacks)) _call_all_callbacks(subscriber) await hass.async_block_till_done() diff --git a/tests/components/hassio/conftest.py b/tests/components/hassio/conftest.py index 7075b9d6982..c9fbf1a7c56 100644 --- a/tests/components/hassio/conftest.py +++ b/tests/components/hassio/conftest.py @@ -11,7 +11,7 @@ import pytest from homeassistant.auth.models import RefreshToken from homeassistant.components.hassio.handler import HassIO, HassioAPIError -from homeassistant.core import CoreState, HomeAssistant +from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.setup import async_setup_component @@ -75,7 +75,6 @@ def hassio_stubs( "homeassistant.components.hassio.issues.SupervisorIssues.setup", ), ): - hass.set_state(CoreState.starting) hass.loop.run_until_complete(async_setup_component(hass, "hassio", {})) return hass_api.call_args[0][1] diff --git a/tests/components/hassio/test_backup.py b/tests/components/hassio/test_backup.py index 07a68b158d3..af951fe8aa1 100644 --- a/tests/components/hassio/test_backup.py +++ b/tests/components/hassio/test_backup.py @@ -3,6 +3,7 @@ from collections.abc import ( AsyncGenerator, AsyncIterator, + Buffer, Callable, Coroutine, Generator, @@ -13,7 +14,7 @@ from datetime import datetime from io import StringIO import os from pathlib import PurePath -from typing import Any +from typing import Any, cast from unittest.mock import ANY, AsyncMock, Mock, patch from uuid import UUID @@ -341,7 +342,7 @@ def mock_backup_agent( async def delete_backup(backup_id: str, **kwargs: Any) -> None: """Mock delete.""" - get_backup(backup_id) + await get_backup(backup_id) async def download_backup(backup_id: str, **kwargs: Any) -> AsyncIterator[bytes]: """Mock download.""" @@ -349,7 +350,7 @@ def mock_backup_agent( async def get_backup(backup_id: str, **kwargs: Any) -> AgentBackup: """Get a backup.""" - backup = next((b for b in backups if b.backup_id == backup_id), None) + backup = next((b for b in _backups if b.backup_id == backup_id), None) if backup is None: raise BackupNotFound return backup @@ -361,15 +362,15 @@ def mock_backup_agent( **kwargs: Any, ) -> None: """Upload a backup.""" - backups.append(backup) + _backups.append(backup) backup_stream = await open_stream() backup_data = bytearray() async for chunk in backup_stream: backup_data += chunk backups_data[backup.backup_id] = backup_data - backups = backups or [] - backups_data: dict[str, bytes] = {} + _backups = backups or [] + backups_data: dict[str, Buffer] = {} mock_agent = Mock(spec=BackupAgent) mock_agent.domain = domain mock_agent.name = name @@ -401,7 +402,7 @@ async def _setup_backup_platform( platform: BackupAgentPlatformProtocol, ) -> None: """Set up a mock domain.""" - mock_platform(hass, f"{domain}.backup", platform) + mock_platform(hass, f"{domain}.backup", cast(Mock, platform)) assert await async_setup_component(hass, domain, {}) await hass.async_block_till_done() @@ -423,7 +424,7 @@ async def _setup_backup_platform( name="test", read_only=False, state=supervisor_mounts.MountState.ACTIVE, - user_path="test", + user_path=PurePath("test"), usage=supervisor_mounts.MountUsage.BACKUP, server="test", type=supervisor_mounts.MountType.CIFS, @@ -441,7 +442,7 @@ async def _setup_backup_platform( name="test", read_only=False, state=supervisor_mounts.MountState.ACTIVE, - user_path="test", + 
user_path=PurePath("test"), usage=supervisor_mounts.MountUsage.MEDIA, server="test", type=supervisor_mounts.MountType.CIFS, @@ -854,7 +855,7 @@ DEFAULT_BACKUP_OPTIONS = supervisor_backups.PartialBackupOptions( "with_automatic_settings": False, }, filename=PurePath("Test_2025-01-30_05.42_12345678.tar"), - folders={"ssl"}, + folders={supervisor_backups.Folder("ssl")}, homeassistant_exclude_database=False, homeassistant=True, location=[LOCATION_LOCAL_STORAGE], @@ -877,7 +878,7 @@ DEFAULT_BACKUP_OPTIONS = supervisor_backups.PartialBackupOptions( ), ( {"include_all_addons": True}, - replace(DEFAULT_BACKUP_OPTIONS, addons="ALL"), + replace(DEFAULT_BACKUP_OPTIONS, addons=supervisor_backups.AddonSet("ALL")), ), ( {"include_database": False}, @@ -885,7 +886,14 @@ DEFAULT_BACKUP_OPTIONS = supervisor_backups.PartialBackupOptions( ), ( {"include_folders": ["media", "share"]}, - replace(DEFAULT_BACKUP_OPTIONS, folders={"media", "share", "ssl"}), + replace( + DEFAULT_BACKUP_OPTIONS, + folders={ + supervisor_backups.Folder("media"), + supervisor_backups.Folder("share"), + supervisor_backups.Folder("ssl"), + }, + ), ), ( { @@ -895,7 +903,7 @@ DEFAULT_BACKUP_OPTIONS = supervisor_backups.PartialBackupOptions( }, replace( DEFAULT_BACKUP_OPTIONS, - folders={"media"}, + folders={supervisor_backups.Folder("media")}, homeassistant=False, homeassistant_exclude_database=True, ), @@ -1251,11 +1259,11 @@ async def test_reader_writer_create_per_agent_encryption( hass_ws_client: WebSocketGenerator, freezer: FrozenDateTimeFactory, supervisor_client: AsyncMock, - commands: dict[str, Any], + commands: list[dict[str, Any]], password: str | None, agent_ids: list[str], password_sent_to_supervisor: str | None, - create_locations: list[str | None], + create_locations: list[str], create_protected: bool, upload_locations: list[str | None], ) -> None: @@ -1270,7 +1278,7 @@ async def test_reader_writer_create_per_agent_encryption( name=f"share{i}", read_only=False, state=supervisor_mounts.MountState.ACTIVE, - user_path=f"share{i}", + user_path=PurePath(f"share{i}"), usage=supervisor_mounts.MountUsage.BACKUP, server=f"share{i}", type=supervisor_mounts.MountType.CIFS, @@ -1996,7 +2004,7 @@ async def test_reader_writer_restore_remote_backup( homeassistant_version="2024.12.0", name="Test", protected=False, - size=0.0, + size=0, ) remote_agent = mock_backup_agent("remote", backups=[test_backup]) await _setup_backup_platform( @@ -2394,7 +2402,7 @@ async def test_reader_writer_restore_wrong_parameters( @pytest.mark.parametrize( - ("get_job_result", "last_non_idle_event"), + ("get_job_result", "last_action_event"), [ ( TEST_JOB_DONE, @@ -2422,7 +2430,7 @@ async def test_restore_progress_after_restart( hass_ws_client: WebSocketGenerator, supervisor_client: AsyncMock, get_job_result: supervisor_jobs.Job, - last_non_idle_event: dict[str, Any], + last_action_event: dict[str, Any], ) -> None: """Test restore backup progress after restart.""" @@ -2438,7 +2446,7 @@ async def test_restore_progress_after_restart( response = await client.receive_json() assert response["success"] - assert response["result"]["last_non_idle_event"] == last_non_idle_event + assert response["result"]["last_action_event"] == last_action_event assert response["result"]["state"] == "idle" @@ -2516,7 +2524,7 @@ async def test_restore_progress_after_restart_report_progress( response = await client.receive_json() assert response["success"] - assert response["result"]["last_non_idle_event"] == { + assert response["result"]["last_action_event"] == { "manager_state": 
"restore_backup", "reason": None, "stage": "addons", @@ -2545,7 +2553,7 @@ async def test_restore_progress_after_restart_unknown_job( response = await client.receive_json() assert response["success"] - assert response["result"]["last_non_idle_event"] is None + assert response["result"]["last_action_event"] is None assert response["result"]["state"] == "idle" @@ -2626,7 +2634,7 @@ async def test_config_load_config_info( freezer: FrozenDateTimeFactory, snapshot: SnapshotAssertion, hass_storage: dict[str, Any], - storage_data: dict[str, Any] | None, + storage_data: dict[str, Any], ) -> None: """Test loading stored backup config and reading it via config/info.""" client = await hass_ws_client(hass) diff --git a/tests/components/heos/__init__.py b/tests/components/heos/__init__.py index 016cc7b3580..1fb67bd114f 100644 --- a/tests/components/heos/__init__.py +++ b/tests/components/heos/__init__.py @@ -2,7 +2,14 @@ from unittest.mock import AsyncMock -from pyheos import ConnectionState, Heos, HeosGroup, HeosOptions, HeosPlayer +from pyheos import ( + ConnectionState, + Heos, + HeosGroup, + HeosOptions, + HeosPlayer, + MediaMusicSource, +) class MockHeos(Heos): @@ -13,6 +20,7 @@ class MockHeos(Heos): super().__init__(options) # Overwrite the methods with async mocks, changing type self.add_to_queue: AsyncMock = AsyncMock() + self.browse_media: AsyncMock = AsyncMock() self.connect: AsyncMock = AsyncMock() self.disconnect: AsyncMock = AsyncMock() self.get_favorites: AsyncMock = AsyncMock() @@ -20,6 +28,7 @@ class MockHeos(Heos): self.get_input_sources: AsyncMock = AsyncMock() self.get_playlists: AsyncMock = AsyncMock() self.get_players: AsyncMock = AsyncMock() + self.get_music_sources: AsyncMock = AsyncMock() self.group_volume_down: AsyncMock = AsyncMock() self.group_volume_up: AsyncMock = AsyncMock() self.get_system_info: AsyncMock = AsyncMock() @@ -28,9 +37,11 @@ class MockHeos(Heos): self.play_preset_station: AsyncMock = AsyncMock() self.play_url: AsyncMock = AsyncMock() self.player_clear_queue: AsyncMock = AsyncMock() + self.player_get_queue: AsyncMock = AsyncMock() self.player_get_quick_selects: AsyncMock = AsyncMock() self.player_play_next: AsyncMock = AsyncMock() self.player_play_previous: AsyncMock = AsyncMock() + self.player_play_queue: AsyncMock = AsyncMock() self.player_play_quick_select: AsyncMock = AsyncMock() self.player_set_mute: AsyncMock = AsyncMock() self.player_set_play_mode: AsyncMock = AsyncMock() @@ -64,3 +75,17 @@ class MockHeos(Heos): def mock_set_connection_state(self, connection_state: ConnectionState) -> None: """Set the connection state on the mock instance.""" self._connection._state = connection_state + + def mock_set_current_host(self, host: str) -> None: + """Set the current host on the mock instance.""" + self._connection._host = host + + def mock_set_music_sources( + self, music_sources: dict[int, MediaMusicSource] + ) -> None: + """Set the music sources on the mock instance.""" + for music_source in music_sources.values(): + music_source.heos = self + self._music_sources = music_sources + self._music_sources_loaded = bool(music_sources) + self.get_music_sources.return_value = music_sources diff --git a/tests/components/heos/conftest.py b/tests/components/heos/conftest.py index 7bed05a0289..835e4436398 100644 --- a/tests/components/heos/conftest.py +++ b/tests/components/heos/conftest.py @@ -6,6 +6,7 @@ from collections.abc import Callable, Iterator from unittest.mock import Mock, patch from pyheos import ( + BrowseResult, HeosGroup, HeosHost, HeosNowPlayingMedia, 
@@ -14,10 +15,12 @@ from pyheos import ( HeosSystem, LineOutLevelType, MediaItem, + MediaMusicSource, MediaType, NetworkType, PlayerUpdateResult, PlayState, + QueueItem, RepeatType, const, ) @@ -294,10 +297,10 @@ def quick_selects_fixture() -> dict[int, str]: } -@pytest.fixture(name="playlists") -def playlists_fixture() -> list[MediaItem]: - """Create favorites fixture.""" - playlist = MediaItem( +@pytest.fixture(name="playlist") +def playlist_fixture() -> MediaItem: + """Create playlist fixture.""" + return MediaItem( source_id=const.MUSIC_SOURCE_PLAYLISTS, name="Awesome Music", type=MediaType.PLAYLIST, @@ -306,6 +309,44 @@ def playlists_fixture() -> list[MediaItem]: image_url="", heos=None, ) + + +@pytest.fixture(name="music_sources") +def music_sources_fixture() -> dict[int, MediaMusicSource]: + """Create music sources fixture.""" + return { + const.MUSIC_SOURCE_PANDORA: MediaMusicSource( + source_id=const.MUSIC_SOURCE_PANDORA, + name="Pandora", + type=MediaType.MUSIC_SERVICE, + available=True, + service_username="user", + image_url="", + heos=None, + ), + const.MUSIC_SOURCE_TUNEIN: MediaMusicSource( + source_id=const.MUSIC_SOURCE_TUNEIN, + name="TuneIn", + type=MediaType.MUSIC_SERVICE, + available=False, + service_username=None, + image_url="", + heos=None, + ), + } + + +@pytest.fixture(name="pandora_browse_result") +def pandora_browse_response_fixture(favorites: dict[int, MediaItem]) -> BrowseResult: + """Create a mock response for browsing Pandora.""" + return BrowseResult( + 1, 1, const.MUSIC_SOURCE_PANDORA, items=[favorites[1]], options=[] + ) + + +@pytest.fixture(name="playlists") +def playlists_fixture(playlist: MediaItem) -> list[MediaItem]: + """Create playlists fixture.""" return [playlist] @@ -319,3 +360,28 @@ def change_data_fixture() -> PlayerUpdateResult: def change_data_mapped_ids_fixture() -> PlayerUpdateResult: """Create player change data for testing.""" return PlayerUpdateResult(updated_player_ids={1: 101}) + + +@pytest.fixture(name="queue") +def queue_fixture() -> list[QueueItem]: + """Create a queue fixture.""" + return [ + QueueItem( + queue_id=1, + song="Espresso", + album="Espresso", + artist="Sabrina Carpenter", + image_url="http://resources.wimpmusic.com/images/e4f2d75f/a69e/4b8a/b800/e18546b1ad4c/640x640.jpg", + media_id="356276483", + album_id="356276481", + ), + QueueItem( + queue_id=2, + song="A Bar Song (Tipsy)", + album="A Bar Song (Tipsy)", + artist="Shaboozey", + image_url="http://resources.wimpmusic.com/images/d05b8da3/4fae/45ff/ac1b/7ab7caab3523/640x640.jpg", + media_id="354365598", + album_id="354365596", + ), + ] diff --git a/tests/components/heos/snapshots/test_diagnostics.ambr b/tests/components/heos/snapshots/test_diagnostics.ambr index 98ce8a7bcbf..58685f5cf8f 100644 --- a/tests/components/heos/snapshots/test_diagnostics.ambr +++ b/tests/components/heos/snapshots/test_diagnostics.ambr @@ -106,6 +106,7 @@ 'model': 'HEOS Drive HS2', 'name': 'Test Player', 'network': 'wired', + 'preferred_host': True, 'serial': '**REDACTED**', 'supported_version': True, 'version': '1.0.0', @@ -116,6 +117,7 @@ 'model': 'HEOS Drive HS2', 'name': 'Test Player', 'network': 'wired', + 'preferred_host': True, 'serial': '**REDACTED**', 'supported_version': True, 'version': '1.0.0', @@ -125,6 +127,7 @@ 'model': 'Speaker', 'name': 'Test Player 2', 'network': 'wifi', + 'preferred_host': False, 'serial': '**REDACTED**', 'supported_version': True, 'version': '1.0.0', @@ -137,6 +140,7 @@ 'model': 'HEOS Drive HS2', 'name': 'Test Player', 'network': 'wired', + 'preferred_host': 
True, 'serial': '**REDACTED**', 'supported_version': True, 'version': '1.0.0', diff --git a/tests/components/heos/snapshots/test_media_player.ambr b/tests/components/heos/snapshots/test_media_player.ambr index 88d27f2073a..d366a7f6317 100644 --- a/tests/components/heos/snapshots/test_media_player.ambr +++ b/tests/components/heos/snapshots/test_media_player.ambr @@ -1,4 +1,190 @@ # serializer version: 1 +# name: test_browse_media_heos_media + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': 'track', + 'media_content_id': 'heos://media/1/station?name=Today%27s+Hits+Radio&image_url=&playable=True&browsable=False&media_id=123456789', + 'media_content_type': '', + 'thumbnail': '', + 'title': "Today's Hits Radio", + }), + ]), + 'children_media_class': 'track', + 'media_class': 'directory', + 'media_content_id': 'heos://media/1/music_service?name=Pandora&image_url=&available=True&service_username=user', + 'media_content_type': '', + 'not_shown': 0, + 'thumbnail': '', + 'title': 'Pandora', + }) +# --- +# name: test_browse_media_heos_media_error_returns_empty + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + ]), + 'children_media_class': None, + 'media_class': 'directory', + 'media_content_id': 'heos://media/1/music_service?name=Pandora&image_url=&available=True&service_username=user', + 'media_content_type': '', + 'not_shown': 0, + 'thumbnail': '', + 'title': 'Pandora', + }) +# --- +# name: test_browse_media_media_source + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': 'music', + 'media_content_id': 'media-source://media_source/local/test.mp3', + 'media_content_type': 'audio/mpeg', + 'thumbnail': None, + 'title': 'test.mp3', + }), + ]), + 'children_media_class': 'music', + 'media_class': 'directory', + 'media_content_id': 'media-source://media_source/local/.', + 'media_content_type': '', + 'not_shown': 1, + 'thumbnail': None, + 'title': 'media', + }) +# --- +# name: test_browse_media_root + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': None, + 'media_class': 'directory', + 'media_content_id': 'heos://media/1/music_service?name=Pandora&image_url=&available=True&service_username=user', + 'media_content_type': '', + 'thumbnail': '', + 'title': 'Pandora', + }), + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': None, + 'media_class': 'directory', + 'media_content_id': 'heos://media/3/music_service?name=TuneIn&image_url=&available=False', + 'media_content_type': '', + 'thumbnail': '', + 'title': 'TuneIn', + }), + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': 'music', + 'media_class': 'directory', + 'media_content_id': 'media-source://media_source/local/.', + 'media_content_type': '', + 'thumbnail': None, + 'title': 'media', + }), + ]), + 'children_media_class': 'directory', + 'media_class': 'directory', + 'media_content_id': 'heos://media', + 'media_content_type': '', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'Music Sources', + }) +# --- +# name: test_browse_media_root_no_media_source + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': None, + 'media_class': 'directory', + 
'media_content_id': 'heos://media/1/music_service?name=Pandora&image_url=&available=True&service_username=user', + 'media_content_type': '', + 'thumbnail': '', + 'title': 'Pandora', + }), + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': None, + 'media_class': 'directory', + 'media_content_id': 'heos://media/3/music_service?name=TuneIn&image_url=&available=False', + 'media_content_type': '', + 'thumbnail': '', + 'title': 'TuneIn', + }), + ]), + 'children_media_class': 'directory', + 'media_class': 'directory', + 'media_content_id': 'heos://media', + 'media_content_type': '', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'Music Sources', + }) +# --- +# name: test_browse_media_root_source_error_continues + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + ]), + 'children_media_class': 'directory', + 'media_class': 'directory', + 'media_content_id': 'heos://media', + 'media_content_type': '', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'Music Sources', + }) +# --- +# name: test_get_queue + dict({ + 'media_player.test_player': dict({ + 'queue': list([ + dict({ + 'album': 'Espresso', + 'album_id': '356276481', + 'artist': 'Sabrina Carpenter', + 'image_url': 'http://resources.wimpmusic.com/images/e4f2d75f/a69e/4b8a/b800/e18546b1ad4c/640x640.jpg', + 'media_id': '356276483', + 'queue_id': 1, + 'song': 'Espresso', + }), + dict({ + 'album': 'A Bar Song (Tipsy)', + 'album_id': '354365596', + 'artist': 'Shaboozey', + 'image_url': 'http://resources.wimpmusic.com/images/d05b8da3/4fae/45ff/ac1b/7ab7caab3523/640x640.jpg', + 'media_id': '354365598', + 'queue_id': 2, + 'song': 'A Bar Song (Tipsy)', + }), + ]), + }), + }) +# --- # name: test_state_attributes StateSnapshot({ 'attributes': ReadOnlyDict({ diff --git a/tests/components/heos/test_init.py b/tests/components/heos/test_init.py index 87cc8dd7dde..7bc232ad5a6 100644 --- a/tests/components/heos/test_init.py +++ b/tests/components/heos/test_init.py @@ -285,11 +285,11 @@ async def test_reconnected_new_entities_created( players = controller.players.copy() players[3] = player_factory(3, "Test Player 3", "HEOS Link") controller.mock_set_players(players) - controller.load_players.return_value = PlayerUpdateResult([3], [], {}) + update = PlayerUpdateResult([3], [], {}) # Simulate reconnection await controller.dispatcher.wait_send( - SignalType.HEOS_EVENT, SignalHeosEvent.CONNECTED + SignalType.CONTROLLER_EVENT, const.EVENT_PLAYERS_CHANGED, update ) await hass.async_block_till_done() @@ -297,6 +297,25 @@ async def test_reconnected_new_entities_created( assert entity_registry.async_get_entity_id(MEDIA_PLAYER_DOMAIN, DOMAIN, "3") +async def test_reconnected_failover_updates_host( + hass: HomeAssistant, config_entry: MockConfigEntry, controller: MockHeos +) -> None: + """Test the config entry host is updated after failover.""" + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + assert config_entry.data[CONF_HOST] == "127.0.0.1" + + # Simulate reconnection + controller.mock_set_current_host("127.0.0.2") + await controller.dispatcher.wait_send( + SignalType.HEOS_EVENT, SignalHeosEvent.CONNECTED + ) + await hass.async_block_till_done() + + # Assert config entry host updated + assert config_entry.data[CONF_HOST] == "127.0.0.2" + + async def test_players_changed_new_entities_created( hass: HomeAssistant, entity_registry: er.EntityRegistry, diff --git a/tests/components/heos/test_media_player.py b/tests/components/heos/test_media_player.py index 
3e755a29a0a..5bc4f2bae30 100644 --- a/tests/components/heos/test_media_player.py +++ b/tests/components/heos/test_media_player.py @@ -7,23 +7,28 @@ from typing import Any from freezegun.api import FrozenDateTimeFactory from pyheos import ( AddCriteriaType, + BrowseResult, CommandFailedError, HeosError, MediaItem, + MediaMusicSource, MediaType as HeosMediaType, PlayerUpdateResult, PlayState, + QueueItem, RepeatType, SignalHeosEvent, SignalType, const, ) +from pyheos.util import mediauri import pytest from syrupy.assertion import SnapshotAssertion from syrupy.filters import props from homeassistant.components.heos.const import ( DOMAIN, + SERVICE_GET_QUEUE, SERVICE_GROUP_VOLUME_DOWN, SERVICE_GROUP_VOLUME_SET, SERVICE_GROUP_VOLUME_UP, @@ -51,6 +56,7 @@ from homeassistant.components.media_player import ( MediaType, RepeatMode, ) +from homeassistant.components.media_source import DOMAIN as MS_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_MEDIA_NEXT_TRACK, @@ -73,6 +79,8 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from . import MockHeos from tests.common import MockConfigEntry, async_fire_time_changed +from tests.conftest import async_setup_component +from tests.typing import WebSocketGenerator async def test_state_attributes( @@ -158,7 +166,6 @@ async def test_updates_from_connection_event( state = hass.states.get("media_player.test_player") assert state is not None assert state.state == STATE_IDLE - assert controller.load_players.call_count == 1 # Disconnected controller.load_players.reset_mock() @@ -170,11 +177,8 @@ async def test_updates_from_connection_event( state = hass.states.get("media_player.test_player") assert state is not None assert state.state == STATE_UNAVAILABLE - assert controller.load_players.call_count == 0 - # Connected handles refresh failure - controller.load_players.reset_mock() - controller.load_players.side_effect = CommandFailedError("", "Failure", 1) + # Reconnect and state updates player.available = True await controller.dispatcher.wait_send( SignalType.HEOS_EVENT, SignalHeosEvent.CONNECTED @@ -183,38 +187,6 @@ async def test_updates_from_connection_event( state = hass.states.get("media_player.test_player") assert state is not None assert state.state == STATE_IDLE - assert controller.load_players.call_count == 1 - assert "Unable to refresh players" in caplog.text - - -async def test_updates_from_connection_event_new_player_ids( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - device_registry: dr.DeviceRegistry, - config_entry: MockConfigEntry, - controller: MockHeos, - change_data_mapped_ids: PlayerUpdateResult, -) -> None: - """Test player ids changed after reconnection updates ids.""" - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) - - # Assert current IDs - assert device_registry.async_get_device(identifiers={(DOMAIN, "1")}) - assert entity_registry.async_get_entity_id(MEDIA_PLAYER_DOMAIN, DOMAIN, "1") - - # Send event which will result in updated IDs. 
- controller.load_players.return_value = change_data_mapped_ids - await controller.dispatcher.wait_send( - SignalType.HEOS_EVENT, SignalHeosEvent.CONNECTED - ) - await hass.async_block_till_done() - - # Assert updated IDs and previous don't exist - assert not device_registry.async_get_device(identifiers={(DOMAIN, "1")}) - assert device_registry.async_get_device(identifiers={(DOMAIN, "101")}) - assert not entity_registry.async_get_entity_id(MEDIA_PLAYER_DOMAIN, DOMAIN, "1") - assert entity_registry.async_get_entity_id(MEDIA_PLAYER_DOMAIN, DOMAIN, "101") async def test_updates_from_sources_updated( @@ -1275,6 +1247,312 @@ async def test_play_media_invalid_type( ) +async def test_play_media_media_uri( + hass: HomeAssistant, + config_entry: MockConfigEntry, + controller: MockHeos, + playlist: MediaItem, +) -> None: + """Test the play media service with HEOS media uri.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + media_content_id = mediauri.to_media_uri(playlist) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_MEDIA_CONTENT_ID: media_content_id, + ATTR_MEDIA_CONTENT_TYPE: "", + }, + blocking=True, + ) + controller.play_media.assert_called_once() + + +async def test_play_media_media_uri_invalid( + hass: HomeAssistant, + config_entry: MockConfigEntry, + controller: MockHeos, +) -> None: + """Test the play media service with an invalid HEOS media uri raises.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + media_id = "heos://media/1/music_service?name=Pandora&available=False&image_url=" + + with pytest.raises( + HomeAssistantError, + match=re.escape(f"Unable to play media: Invalid media id '{media_id}'"), + ): + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_MEDIA_CONTENT_ID: media_id, + ATTR_MEDIA_CONTENT_TYPE: "", + }, + blocking=True, + ) + controller.play_media.assert_not_called() + + +async def test_play_media_music_source_url( + hass: HomeAssistant, + config_entry: MockConfigEntry, + controller: MockHeos, +) -> None: + """Test the play media service with a music source url.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await async_setup_component(hass, MS_DOMAIN, {MS_DOMAIN: {}}) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_MEDIA_CONTENT_ID: "media-source://media_source/local/test.mp3", + ATTR_MEDIA_CONTENT_TYPE: "", + }, + blocking=True, + ) + controller.play_url.assert_called_once() + + +async def test_play_media_queue( + hass: HomeAssistant, + config_entry: MockConfigEntry, + controller: MockHeos, +) -> None: + """Test the play media service with type queue.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_MEDIA_CONTENT_TYPE: "queue", + ATTR_MEDIA_CONTENT_ID: "2", + }, + blocking=True, + ) + controller.player_play_queue.assert_called_once_with(1, 2) + + +async def test_play_media_queue_invalid( + hass: HomeAssistant, config_entry: MockConfigEntry, controller: MockHeos +) -> None: + """Test the play media service with an invalid queue id.""" + config_entry.add_to_hass(hass) + 
await hass.config_entries.async_setup(config_entry.entry_id) + with pytest.raises( + HomeAssistantError, + match=re.escape("Unable to play media: Invalid queue id 'Invalid'"), + ): + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_MEDIA_CONTENT_TYPE: "queue", + ATTR_MEDIA_CONTENT_ID: "Invalid", + }, + blocking=True, + ) + assert controller.player_play_queue.call_count == 0 + + +async def test_browse_media_root( + hass: HomeAssistant, + config_entry: MockConfigEntry, + controller: MockHeos, + music_sources: dict[int, MediaMusicSource], + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test browsing the root.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await async_setup_component(hass, MS_DOMAIN, {MS_DOMAIN: {}}) + + controller.mock_set_music_sources(music_sources) + + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "media_player/browse_media", + "entity_id": "media_player.test_player", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == snapshot + + +async def test_browse_media_root_no_media_source( + hass: HomeAssistant, + config_entry: MockConfigEntry, + controller: MockHeos, + music_sources: dict[int, MediaMusicSource], + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test browsing the root.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + controller.mock_set_music_sources(music_sources) + + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "media_player/browse_media", + "entity_id": "media_player.test_player", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == snapshot + + +async def test_browse_media_root_source_error_continues( + hass: HomeAssistant, + config_entry: MockConfigEntry, + controller: MockHeos, + hass_ws_client: WebSocketGenerator, + caplog: pytest.LogCaptureFixture, + snapshot: SnapshotAssertion, +) -> None: + """Test browsing the root with an error getting sources continues.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + controller.get_music_sources.side_effect = HeosError("error") + + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "media_player/browse_media", + "entity_id": "media_player.test_player", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == snapshot + assert "Unable to load music sources" in caplog.text + + +async def test_browse_media_heos_media( + hass: HomeAssistant, + config_entry: MockConfigEntry, + controller: MockHeos, + hass_ws_client: WebSocketGenerator, + pandora_browse_result: BrowseResult, + snapshot: SnapshotAssertion, +) -> None: + """Test browsing a heos media item.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + controller.browse_media.return_value = pandora_browse_result + + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "media_player/browse_media", + "entity_id": "media_player.test_player", + "media_content_id": "heos://media/1/music_service?name=Pandora&image_url=&available=True&service_username=user", + "media_content_type": "", + } + ) + response = await 
client.receive_json() + assert response["success"] + assert response["result"] == snapshot + + +async def test_browse_media_heos_media_error_returns_empty( + hass: HomeAssistant, + config_entry: MockConfigEntry, + controller: MockHeos, + hass_ws_client: WebSocketGenerator, + caplog: pytest.LogCaptureFixture, + snapshot: SnapshotAssertion, +) -> None: + """Test browsing a heos media item results in an error, returns empty children.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + controller.browse_media.side_effect = HeosError("error") + + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "media_player/browse_media", + "entity_id": "media_player.test_player", + "media_content_id": "heos://media/1/music_service?name=Pandora&image_url=&available=True&service_username=user", + "media_content_type": "", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == snapshot + assert "Unable to browse media" in caplog.text + + +async def test_browse_media_media_source( + hass: HomeAssistant, + config_entry: MockConfigEntry, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test browsing a media source.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await async_setup_component(hass, MS_DOMAIN, {MS_DOMAIN: {}}) + + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "media_player/browse_media", + "entity_id": "media_player.test_player", + "media_content_id": "media-source://media_source/local/.", + "media_content_type": "", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == snapshot + + +async def test_browse_media_invalid_content_id( + hass: HomeAssistant, + config_entry: MockConfigEntry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test browsing an invalid content id fails.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "media_player/browse_media", + "entity_id": "media_player.test_player", + "media_content_id": "invalid", + "media_content_type": "", + } + ) + response = await client.receive_json() + assert not response["success"] + + @pytest.mark.parametrize( ("members", "expected"), [ @@ -1465,3 +1743,27 @@ async def test_media_player_group_fails_wrong_integration( blocking=True, ) controller.set_group.assert_not_called() + + +async def test_get_queue( + hass: HomeAssistant, + config_entry: MockConfigEntry, + controller: MockHeos, + queue: list[QueueItem], + snapshot: SnapshotAssertion, +) -> None: + """Test the get queue service.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + controller.player_get_queue.return_value = queue + response = await hass.services.async_call( + DOMAIN, + SERVICE_GET_QUEUE, + { + ATTR_ENTITY_ID: "media_player.test_player", + }, + blocking=True, + return_response=True, + ) + controller.player_get_queue.assert_called_once_with(1, None, None) + assert response == snapshot diff --git a/tests/components/home_connect/conftest.py b/tests/components/home_connect/conftest.py index c0caf2b2bdd..21cd236b1a8 100644 --- a/tests/components/home_connect/conftest.py +++ b/tests/components/home_connect/conftest.py @@ -473,20 +473,6 @@ def mock_client_with_exception( return mock 
-@pytest.fixture(name="appliance_ha_id") -def mock_appliance_ha_id( - appliances: list[HomeAppliance], request: pytest.FixtureRequest -) -> str: - """Fixture to get the ha_id of an appliance.""" - appliance_type = "Washer" - if hasattr(request, "param") and request.param: - appliance_type = request.param - for appliance in appliances: - if appliance.type == appliance_type: - return appliance.ha_id - raise ValueError(f"Appliance {appliance_type} not found") - - @pytest.fixture(name="appliances") def mock_appliances( appliances_data: str, request: pytest.FixtureRequest diff --git a/tests/components/home_connect/snapshots/test_init.ambr b/tests/components/home_connect/snapshots/test_services.ambr similarity index 96% rename from tests/components/home_connect/snapshots/test_init.ambr rename to tests/components/home_connect/snapshots/test_services.ambr index 709621aaefb..610e9fa1248 100644 --- a/tests/components/home_connect/snapshots/test_init.ambr +++ b/tests/components/home_connect/snapshots/test_services.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_set_program_and_options[service_call0-set_selected_program] +# name: test_set_program_and_options[service_call0-set_selected_program-Washer] _Call( tuple( 'SIEMENS-HCS03WCH1-7BC6383CF794', @@ -18,7 +18,7 @@ }), ) # --- -# name: test_set_program_and_options[service_call1-start_program] +# name: test_set_program_and_options[service_call1-start_program-Washer] _Call( tuple( 'SIEMENS-HCS03WCH1-7BC6383CF794', @@ -37,7 +37,7 @@ }), ) # --- -# name: test_set_program_and_options[service_call2-set_active_program_options] +# name: test_set_program_and_options[service_call2-set_active_program_options-Washer] _Call( tuple( 'SIEMENS-HCS03WCH1-7BC6383CF794', @@ -57,7 +57,7 @@ }), ) # --- -# name: test_set_program_and_options[service_call3-set_selected_program_options] +# name: test_set_program_and_options[service_call3-set_selected_program_options-Washer] _Call( tuple( 'SIEMENS-HCS03WCH1-7BC6383CF794', diff --git a/tests/components/home_connect/test_binary_sensor.py b/tests/components/home_connect/test_binary_sensor.py index a06e386b84f..31c15ec00cf 100644 --- a/tests/components/home_connect/test_binary_sensor.py +++ b/tests/components/home_connect/test_binary_sensor.py @@ -3,7 +3,14 @@ from collections.abc import Awaitable, Callable from unittest.mock import AsyncMock, MagicMock -from aiohomeconnect.model import ArrayOfEvents, Event, EventKey, EventMessage, EventType +from aiohomeconnect.model import ( + ArrayOfEvents, + Event, + EventKey, + EventMessage, + EventType, + HomeAppliance, +) from aiohomeconnect.model.error import HomeConnectApiError import pytest @@ -52,8 +59,9 @@ async def test_binary_sensors( assert config_entry.state == ConfigEntryState.LOADED +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) async def test_paired_depaired_devices_flow( - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -67,7 +75,7 @@ async def test_paired_depaired_devices_flow( assert await integration_setup(client) assert config_entry.state == ConfigEntryState.LOADED - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) assert entity_entries @@ -75,7 +83,7 @@ async def test_paired_depaired_devices_flow( await client.add_events( 
[ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DEPAIRED, data=ArrayOfEvents([]), ) @@ -83,7 +91,7 @@ async def test_paired_depaired_devices_flow( ) await hass.async_block_till_done() - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert not device for entity_entry in entity_entries: assert not entity_registry.async_get(entity_entry.entity_id) @@ -92,7 +100,7 @@ async def test_paired_depaired_devices_flow( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.PAIRED, data=ArrayOfEvents([]), ) @@ -100,13 +108,14 @@ async def test_paired_depaired_devices_flow( ) await hass.async_block_till_done() - assert device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + assert device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) for entity_entry in entity_entries: assert entity_registry.async_get(entity_entry.entity_id) +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) async def test_connected_devices( - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -123,7 +132,7 @@ async def test_connected_devices( get_status_original_mock = client.get_status def get_status_side_effect(ha_id: str): - if ha_id == appliance_ha_id: + if ha_id == appliance.ha_id: raise HomeConnectApiError( "SDK.Error.HomeAppliance.Connection.Initialization.Failed" ) @@ -135,14 +144,14 @@ async def test_connected_devices( assert config_entry.state == ConfigEntryState.LOADED client.get_status = get_status_original_mock - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, data=ArrayOfEvents([]), ) @@ -150,19 +159,20 @@ async def test_connected_devices( ) await hass.async_block_till_done() - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device new_entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) assert len(new_entity_entries) > len(entity_entries) -async def test_binary_sensors_entity_availabilty( +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) +async def test_binary_sensors_entity_availability( hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, client: MagicMock, - appliance_ha_id: str, + appliance: HomeAppliance, ) -> None: """Test if binary sensor entities availability are based on the appliance connection state.""" entity_ids = [ @@ -181,7 +191,7 @@ async def test_binary_sensors_entity_availabilty( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DISCONNECTED, ArrayOfEvents([]), ) @@ -195,7 +205,7 @@ async def test_binary_sensors_entity_availabilty( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, ArrayOfEvents([]), ) @@ -209,6 +219,7 @@ async def test_binary_sensors_entity_availabilty( assert state.state != STATE_UNAVAILABLE 
+@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) @pytest.mark.parametrize( ("value", "expected"), [ @@ -219,7 +230,7 @@ async def test_binary_sensors_entity_availabilty( ], ) async def test_binary_sensors_door_states( - appliance_ha_id: str, + appliance: HomeAppliance, expected: str, value: str, hass: HomeAssistant, @@ -237,7 +248,7 @@ async def test_binary_sensors_door_states( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.STATUS, ArrayOfEvents( [ @@ -259,7 +270,7 @@ async def test_binary_sensors_door_states( @pytest.mark.parametrize( - ("entity_id", "event_key", "event_value_update", "expected", "appliance_ha_id"), + ("entity_id", "event_key", "event_value_update", "expected", "appliance"), [ ( "binary_sensor.washer_remote_control", @@ -304,13 +315,13 @@ async def test_binary_sensors_door_states( "FridgeFreezer", ), ], - indirect=["appliance_ha_id"], + indirect=["appliance"], ) async def test_binary_sensors_functionality( entity_id: str, event_key: EventKey, event_value_update: str, - appliance_ha_id: str, + appliance: HomeAppliance, expected: str, hass: HomeAssistant, config_entry: MockConfigEntry, @@ -325,7 +336,7 @@ async def test_binary_sensors_functionality( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.STATUS, ArrayOfEvents( [ @@ -346,13 +357,14 @@ async def test_binary_sensors_functionality( assert hass.states.is_state(entity_id, expected) +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) async def test_connected_sensor_functionality( hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, client: MagicMock, - appliance_ha_id: str, + appliance: HomeAppliance, ) -> None: """Test if the connected binary sensor reports the right values.""" entity_id = "binary_sensor.washer_connectivity" @@ -365,7 +377,7 @@ async def test_connected_sensor_functionality( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DISCONNECTED, ArrayOfEvents([]), ) @@ -378,7 +390,7 @@ async def test_connected_sensor_functionality( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, ArrayOfEvents([]), ) diff --git a/tests/components/home_connect/test_button.py b/tests/components/home_connect/test_button.py index 5af7e40ca43..f894494792d 100644 --- a/tests/components/home_connect/test_button.py +++ b/tests/components/home_connect/test_button.py @@ -4,7 +4,12 @@ from collections.abc import Awaitable, Callable from typing import Any from unittest.mock import AsyncMock, MagicMock -from aiohomeconnect.model import ArrayOfCommands, CommandKey, EventMessage +from aiohomeconnect.model import ( + ArrayOfCommands, + CommandKey, + EventMessage, + HomeAppliance, +) from aiohomeconnect.model.command import Command from aiohomeconnect.model.error import HomeConnectApiError from aiohomeconnect.model.event import ArrayOfEvents, EventType @@ -40,8 +45,9 @@ async def test_buttons( assert config_entry.state == ConfigEntryState.LOADED +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) async def test_paired_depaired_devices_flow( - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -55,7 +61,7 @@ async def test_paired_depaired_devices_flow( assert await integration_setup(client) assert config_entry.state == 
ConfigEntryState.LOADED - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) assert entity_entries @@ -63,7 +69,7 @@ async def test_paired_depaired_devices_flow( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DEPAIRED, data=ArrayOfEvents([]), ) @@ -71,7 +77,7 @@ async def test_paired_depaired_devices_flow( ) await hass.async_block_till_done() - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert not device for entity_entry in entity_entries: assert not entity_registry.async_get(entity_entry.entity_id) @@ -80,7 +86,7 @@ async def test_paired_depaired_devices_flow( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.PAIRED, data=ArrayOfEvents([]), ) @@ -88,13 +94,14 @@ async def test_paired_depaired_devices_flow( ) await hass.async_block_till_done() - assert device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + assert device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) for entity_entry in entity_entries: assert entity_registry.async_get(entity_entry.entity_id) +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) async def test_connected_devices( - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -112,14 +119,14 @@ async def test_connected_devices( get_available_programs_mock = client.get_available_programs async def get_available_commands_side_effect(ha_id: str): - if ha_id == appliance_ha_id: + if ha_id == appliance.ha_id: raise HomeConnectApiError( "SDK.Error.HomeAppliance.Connection.Initialization.Failed" ) return await get_available_commands_original_mock.side_effect(ha_id) async def get_available_programs_side_effect(ha_id: str): - if ha_id == appliance_ha_id: + if ha_id == appliance.ha_id: raise HomeConnectApiError( "SDK.Error.HomeAppliance.Connection.Initialization.Failed" ) @@ -137,14 +144,14 @@ async def test_connected_devices( client.get_available_commands = get_available_commands_original_mock client.get_available_programs = get_available_programs_mock - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, data=ArrayOfEvents([]), ) @@ -152,19 +159,20 @@ async def test_connected_devices( ) await hass.async_block_till_done() - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device new_entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) assert len(new_entity_entries) > len(entity_entries) -async def test_button_entity_availabilty( +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) +async def test_button_entity_availability( hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], 
setup_credentials: None, client: MagicMock, - appliance_ha_id: str, + appliance: HomeAppliance, ) -> None: """Test if button entities availability are based on the appliance connection state.""" entity_ids = [ @@ -183,7 +191,7 @@ async def test_button_entity_availabilty( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DISCONNECTED, ArrayOfEvents([]), ) @@ -197,7 +205,7 @@ async def test_button_entity_availabilty( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, ArrayOfEvents([]), ) @@ -211,6 +219,7 @@ async def test_button_entity_availabilty( assert state.state != STATE_UNAVAILABLE +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) @pytest.mark.parametrize( ("entity_id", "method_call", "expected_kwargs"), [ @@ -231,7 +240,7 @@ async def test_button_functionality( entity_id: str, method_call: str, expected_kwargs: dict[str, Any], - appliance_ha_id: str, + appliance: HomeAppliance, ) -> None: """Test if button entities availability are based on the appliance connection state.""" assert config_entry.state == ConfigEntryState.NOT_LOADED @@ -248,7 +257,7 @@ async def test_button_functionality( {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - getattr(client, method_call).assert_called_with(appliance_ha_id, **expected_kwargs) + getattr(client, method_call).assert_called_with(appliance.ha_id, **expected_kwargs) async def test_command_button_exception( diff --git a/tests/components/home_connect/test_coordinator.py b/tests/components/home_connect/test_coordinator.py index 1e584335fcd..050758a6568 100644 --- a/tests/components/home_connect/test_coordinator.py +++ b/tests/components/home_connect/test_coordinator.py @@ -29,16 +29,26 @@ from homeassistant.components.home_connect.const import ( BSH_DOOR_STATE_OPEN, BSH_EVENT_PRESENT_STATE_PRESENT, BSH_POWER_OFF, + DOMAIN, +) +from homeassistant.components.homeassistant import ( + DOMAIN as HA_DOMAIN, + SERVICE_UPDATE_ENTITY, ) from homeassistant.config_entries import ConfigEntries, ConfigEntryState -from homeassistant.const import EVENT_STATE_REPORTED, Platform +from homeassistant.const import ( + ATTR_ENTITY_ID, + EVENT_STATE_REPORTED, + STATE_UNAVAILABLE, + Platform, +) from homeassistant.core import ( Event as HassEvent, EventStateReportedData, HomeAssistant, callback, ) -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util @@ -97,30 +107,30 @@ async def test_coordinator_failure_refresh_and_stream( ) entity_id_1 = "binary_sensor.washer_remote_control" entity_id_2 = "binary_sensor.washer_remote_start" - await async_setup_component(hass, "homeassistant", {}) + await async_setup_component(hass, HA_DOMAIN, {}) await integration_setup(client) assert config_entry.state == ConfigEntryState.LOADED state = hass.states.get(entity_id_1) assert state - assert state.state != "unavailable" + assert state.state != STATE_UNAVAILABLE state = hass.states.get(entity_id_2) assert state - assert state.state != "unavailable" + assert state.state != STATE_UNAVAILABLE client.get_home_appliances.side_effect = HomeConnectError() # Force a coordinator refresh. 
await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": entity_id_1}, blocking=True + HA_DOMAIN, SERVICE_UPDATE_ENTITY, {ATTR_ENTITY_ID: entity_id_1}, blocking=True ) await hass.async_block_till_done() state = hass.states.get(entity_id_1) assert state - assert state.state == "unavailable" + assert state.state == STATE_UNAVAILABLE state = hass.states.get(entity_id_2) assert state - assert state.state == "unavailable" + assert state.state == STATE_UNAVAILABLE # Test that the entity becomes available again after a successful update. @@ -136,16 +146,16 @@ async def test_coordinator_failure_refresh_and_stream( # Force a coordinator refresh. await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": entity_id_1}, blocking=True + HA_DOMAIN, SERVICE_UPDATE_ENTITY, {ATTR_ENTITY_ID: entity_id_1}, blocking=True ) await hass.async_block_till_done() state = hass.states.get(entity_id_1) assert state - assert state.state != "unavailable" + assert state.state != STATE_UNAVAILABLE state = hass.states.get(entity_id_2) assert state - assert state.state != "unavailable" + assert state.state != STATE_UNAVAILABLE # Test that the event stream makes the entity go available too. @@ -159,16 +169,16 @@ async def test_coordinator_failure_refresh_and_stream( # Force a coordinator refresh await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": entity_id_1}, blocking=True + HA_DOMAIN, SERVICE_UPDATE_ENTITY, {ATTR_ENTITY_ID: entity_id_1}, blocking=True ) await hass.async_block_till_done() state = hass.states.get(entity_id_1) assert state - assert state.state == "unavailable" + assert state.state == STATE_UNAVAILABLE state = hass.states.get(entity_id_2) assert state - assert state.state == "unavailable" + assert state.state == STATE_UNAVAILABLE # Now make the entity available again. 
client.get_home_appliances.side_effect = None @@ -198,10 +208,10 @@ async def test_coordinator_failure_refresh_and_stream( state = hass.states.get(entity_id_1) assert state - assert state.state != "unavailable" + assert state.state != STATE_UNAVAILABLE state = hass.states.get(entity_id_2) assert state - assert state.state != "unavailable" + assert state.state != STATE_UNAVAILABLE @pytest.mark.parametrize( @@ -234,9 +244,9 @@ async def test_coordinator_update_failing( getattr(client, mock_method).assert_called() -@pytest.mark.parametrize("appliance_ha_id", ["Dishwasher"], indirect=True) +@pytest.mark.parametrize("appliance", ["Dishwasher"], indirect=True) @pytest.mark.parametrize( - ("event_type", "event_key", "event_value", "entity_id"), + ("event_type", "event_key", "event_value", ATTR_ENTITY_ID), [ ( EventType.STATUS, @@ -268,7 +278,7 @@ async def test_event_listener( integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, client: MagicMock, - appliance_ha_id: str, + appliance: HomeAppliance, entity_registry: er.EntityRegistry, ) -> None: """Test that the event listener works.""" @@ -279,7 +289,7 @@ async def test_event_listener( state = hass.states.get(entity_id) assert state event_message = EventMessage( - appliance_ha_id, + appliance.ha_id, event_type, ArrayOfEvents( [ @@ -326,13 +336,14 @@ async def test_event_listener( listener.assert_called_once_with(new_entity_id) +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) async def tests_receive_setting_and_status_for_first_time_at_events( hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, client: MagicMock, - appliance_ha_id: str, + appliance: HomeAppliance, ) -> None: """Test that the event listener is capable of receiving settings and status for the first time.""" client.get_setting = AsyncMock(return_value=ArrayOfSettings([])) @@ -345,7 +356,7 @@ async def tests_receive_setting_and_status_for_first_time_at_events( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.NOTIFY, ArrayOfEvents( [ @@ -361,7 +372,7 @@ async def tests_receive_setting_and_status_for_first_time_at_events( ), ), EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.STATUS, ArrayOfEvents( [ @@ -499,3 +510,44 @@ async def test_event_listener_resilience( state = hass.states.get(entity_id) assert state assert state.state == after_event_expected_state + + +async def test_devices_updated_on_refresh( + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[MagicMock], Awaitable[bool]], + setup_credentials: None, + client: MagicMock, + device_registry: dr.DeviceRegistry, +) -> None: + """Test handling of devices added or deleted while event stream is down.""" + appliances: list[HomeAppliance] = ( + client.get_home_appliances.return_value.homeappliances + ) + assert len(appliances) >= 3 + client.get_home_appliances = AsyncMock( + return_value=ArrayOfHomeAppliances(appliances[:2]), + ) + + await async_setup_component(hass, HA_DOMAIN, {}) + assert config_entry.state == ConfigEntryState.NOT_LOADED + await integration_setup(client) + assert config_entry.state == ConfigEntryState.LOADED + + for appliance in appliances[:2]: + assert device_registry.async_get_device({(DOMAIN, appliance.ha_id)}) + assert not device_registry.async_get_device({(DOMAIN, appliances[2].ha_id)}) + + client.get_home_appliances = AsyncMock( + return_value=ArrayOfHomeAppliances(appliances[1:3]), + ) + await 
hass.services.async_call( + HA_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: "switch.dishwasher_power"}, + blocking=True, + ) + + assert not device_registry.async_get_device({(DOMAIN, appliances[0].ha_id)}) + for appliance in appliances[2:3]: + assert device_registry.async_get_device({(DOMAIN, appliance.ha_id)}) diff --git a/tests/components/home_connect/test_entity.py b/tests/components/home_connect/test_entity.py index bad02888dbf..e91a01a907a 100644 --- a/tests/components/home_connect/test_entity.py +++ b/tests/components/home_connect/test_entity.py @@ -11,6 +11,7 @@ from aiohomeconnect.model import ( EventKey, EventMessage, EventType, + HomeAppliance, Option, OptionKey, Program, @@ -67,7 +68,7 @@ def platforms() -> list[str]: ) @pytest.mark.parametrize( ( - "appliance_ha_id", + "appliance", "option_entity_id", "options_state_stage_1", "options_availability_stage_2", @@ -91,12 +92,12 @@ def platforms() -> list[str]: (OptionKey.DISHCARE_DISHWASHER_EXTRA_DRY, "switch.dishwasher_extra_dry"), ) ], - indirect=["appliance_ha_id"], + indirect=["appliance"], ) async def test_program_options_retrieval( array_of_programs_program_arg: str, event_key: EventKey, - appliance_ha_id: str, + appliance: HomeAppliance, option_entity_id: dict[OptionKey, str], options_state_stage_1: list[tuple[str, bool | None]], options_availability_stage_2: list[bool], @@ -122,7 +123,7 @@ async def test_program_options_retrieval( ] async def get_all_programs_with_options_mock(ha_id: str) -> ArrayOfPrograms: - if ha_id != appliance_ha_id: + if ha_id != appliance.ha_id: return await original_get_all_programs_mock(ha_id) array_of_programs: ArrayOfPrograms = await original_get_all_programs_mock(ha_id) @@ -204,7 +205,7 @@ async def test_program_options_retrieval( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.NOTIFY, data=ArrayOfEvents( [ @@ -235,6 +236,7 @@ async def test_program_options_retrieval( assert hass.states.is_state(entity_id, STATE_UNKNOWN) +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) @pytest.mark.parametrize( ("array_of_programs_program_arg", "event_key"), [ @@ -251,7 +253,7 @@ async def test_program_options_retrieval( async def test_no_options_retrieval_on_unknown_program( array_of_programs_program_arg: str, event_key: EventKey, - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -285,7 +287,7 @@ async def test_no_options_retrieval_on_unknown_program( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.NOTIFY, data=ArrayOfEvents( [ @@ -315,7 +317,7 @@ async def test_no_options_retrieval_on_unknown_program( ], ) @pytest.mark.parametrize( - ("appliance_ha_id", "option_key", "option_entity_id"), + ("appliance", "option_key", "option_entity_id"), [ ( "Dishwasher", @@ -323,11 +325,11 @@ async def test_no_options_retrieval_on_unknown_program( "switch.dishwasher_half_load", ) ], - indirect=["appliance_ha_id"], + indirect=["appliance"], ) async def test_program_options_retrieval_after_appliance_connection( event_key: EventKey, - appliance_ha_id: str, + appliance: HomeAppliance, option_key: OptionKey, option_entity_id: str, hass: HomeAssistant, @@ -344,7 +346,7 @@ async def test_program_options_retrieval_after_appliance_connection( [ appliance for appliance in array_of_home_appliances.homeappliances - if appliance.ha_id != appliance_ha_id + if appliance.ha_id != appliance.ha_id ] ) @@ -367,7 +369,7 @@ 
async def test_program_options_retrieval_after_appliance_connection( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, data=ArrayOfEvents( [ @@ -405,7 +407,7 @@ async def test_program_options_retrieval_after_appliance_connection( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.NOTIFY, data=ArrayOfEvents( [ @@ -450,7 +452,6 @@ async def test_program_options_retrieval_after_appliance_connection( async def test_option_entity_functionality_exception( set_active_program_option_side_effect: HomeConnectError | None, set_selected_program_option_side_effect: HomeConnectError | None, - appliance_ha_id: str, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], diff --git a/tests/components/home_connect/test_init.py b/tests/components/home_connect/test_init.py index 6e4e428bf6a..21bb0291e1a 100644 --- a/tests/components/home_connect/test_init.py +++ b/tests/components/home_connect/test_init.py @@ -1,17 +1,18 @@ """Test the integration init functionality.""" from collections.abc import Awaitable, Callable -from http import HTTPStatus from typing import Any -from unittest.mock import MagicMock, patch +from unittest.mock import AsyncMock, MagicMock, patch from aiohomeconnect.const import OAUTH2_TOKEN -from aiohomeconnect.model import OptionKey, ProgramKey, SettingKey, StatusKey -from aiohomeconnect.model.error import HomeConnectError, UnauthorizedError +from aiohomeconnect.model import HomeAppliance, SettingKey, StatusKey +from aiohomeconnect.model.error import ( + HomeConnectError, + TooManyRequestsError, + UnauthorizedError, +) +import aiohttp import pytest -import requests_mock -import respx -from syrupy.assertion import SnapshotAssertion from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN from homeassistant.components.home_connect.const import DOMAIN @@ -22,9 +23,8 @@ from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import device_registry as dr, entity_registry as er -import homeassistant.helpers.issue_registry as ir from script.hassfest.translations import RE_TRANSLATION_KEY from .conftest import ( @@ -37,157 +37,6 @@ from .conftest import ( from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker -from tests.typing import ClientSessionGenerator - -DEPRECATED_SERVICE_KV_CALL_PARAMS = [ - { - "domain": DOMAIN, - "service": "set_option_active", - "service_data": { - "device_id": "DEVICE_ID", - "key": OptionKey.BSH_COMMON_FINISH_IN_RELATIVE.value, - "value": 43200, - "unit": "seconds", - }, - "blocking": True, - }, - { - "domain": DOMAIN, - "service": "set_option_selected", - "service_data": { - "device_id": "DEVICE_ID", - "key": OptionKey.LAUNDRY_CARE_WASHER_TEMPERATURE.value, - "value": "LaundryCare.Washer.EnumType.Temperature.GC40", - }, - "blocking": True, - }, -] - -SERVICE_KV_CALL_PARAMS = [ - *DEPRECATED_SERVICE_KV_CALL_PARAMS, - { - "domain": DOMAIN, - "service": "change_setting", - "service_data": { - "device_id": "DEVICE_ID", - "key": SettingKey.BSH_COMMON_CHILD_LOCK.value, - "value": True, - }, - "blocking": True, - }, -] - 
-SERVICE_COMMAND_CALL_PARAMS = [ - { - "domain": DOMAIN, - "service": "pause_program", - "service_data": { - "device_id": "DEVICE_ID", - }, - "blocking": True, - }, - { - "domain": DOMAIN, - "service": "resume_program", - "service_data": { - "device_id": "DEVICE_ID", - }, - "blocking": True, - }, -] - - -SERVICE_PROGRAM_CALL_PARAMS = [ - { - "domain": DOMAIN, - "service": "select_program", - "service_data": { - "device_id": "DEVICE_ID", - "program": ProgramKey.LAUNDRY_CARE_WASHER_COTTON.value, - "key": OptionKey.LAUNDRY_CARE_WASHER_TEMPERATURE.value, - "value": "LaundryCare.Washer.EnumType.Temperature.GC40", - }, - "blocking": True, - }, - { - "domain": DOMAIN, - "service": "start_program", - "service_data": { - "device_id": "DEVICE_ID", - "program": ProgramKey.LAUNDRY_CARE_WASHER_COTTON.value, - "key": OptionKey.BSH_COMMON_FINISH_IN_RELATIVE.value, - "value": 43200, - "unit": "seconds", - }, - "blocking": True, - }, -] - -SERVICE_APPLIANCE_METHOD_MAPPING = { - "set_option_active": "set_active_program_option", - "set_option_selected": "set_selected_program_option", - "change_setting": "set_setting", - "pause_program": "put_command", - "resume_program": "put_command", - "select_program": "set_selected_program", - "start_program": "start_program", -} - -SERVICE_VALIDATION_ERROR_MAPPING = { - "set_option_active": r"Error.*setting.*options.*active.*program.*", - "set_option_selected": r"Error.*setting.*options.*selected.*program.*", - "change_setting": r"Error.*assigning.*value.*setting.*", - "pause_program": r"Error.*executing.*command.*", - "resume_program": r"Error.*executing.*command.*", - "select_program": r"Error.*selecting.*program.*", - "start_program": r"Error.*starting.*program.*", -} - - -SERVICES_SET_PROGRAM_AND_OPTIONS = [ - { - "domain": DOMAIN, - "service": "set_program_and_options", - "service_data": { - "device_id": "DEVICE_ID", - "affects_to": "selected_program", - "program": "dishcare_dishwasher_program_eco_50", - "b_s_h_common_option_start_in_relative": 1800, - }, - "blocking": True, - }, - { - "domain": DOMAIN, - "service": "set_program_and_options", - "service_data": { - "device_id": "DEVICE_ID", - "affects_to": "active_program", - "program": "consumer_products_coffee_maker_program_beverage_coffee", - "consumer_products_coffee_maker_option_bean_amount": "consumer_products_coffee_maker_enum_type_bean_amount_normal", - }, - "blocking": True, - }, - { - "domain": DOMAIN, - "service": "set_program_and_options", - "service_data": { - "device_id": "DEVICE_ID", - "affects_to": "active_program", - "consumer_products_coffee_maker_option_coffee_milk_ratio": "consumer_products_coffee_maker_enum_type_coffee_milk_ratio_50_percent", - }, - "blocking": True, - }, - { - "domain": DOMAIN, - "service": "set_program_and_options", - "service_data": { - "device_id": "DEVICE_ID", - "affects_to": "selected_program", - "consumer_products_coffee_maker_option_fill_quantity": 35, - }, - "blocking": True, - }, -] async def test_entry_setup( @@ -221,14 +70,12 @@ async def test_exception_handling( @pytest.mark.parametrize("token_expiration_time", [12345]) -@respx.mock async def test_token_refresh_success( hass: HomeAssistant, platforms: list[Platform], integration_setup: Callable[[MagicMock], Awaitable[bool]], config_entry: MockConfigEntry, aioclient_mock: AiohttpClientMocker, - requests_mock: requests_mock.Mocker, setup_credentials: None, client: MagicMock, ) -> None: @@ -236,7 +83,6 @@ async def test_token_refresh_success( assert config_entry.data["token"]["access_token"] == FAKE_ACCESS_TOKEN - 
requests_mock.post(OAUTH2_TOKEN, json=SERVER_ACCESS_TOKEN) aioclient_mock.post( OAUTH2_TOKEN, json=SERVER_ACCESS_TOKEN, @@ -280,6 +126,61 @@ async def test_token_refresh_success( ) +@pytest.mark.parametrize("token_expiration_time", [12345]) +@pytest.mark.parametrize( + ("aioclient_mock_args", "expected_config_entry_state"), + [ + ( + { + "status": 400, + "json": {"error": "invalid_grant"}, + }, + ConfigEntryState.SETUP_ERROR, + ), + ( + { + "status": 500, + }, + ConfigEntryState.SETUP_RETRY, + ), + ( + { + "exc": aiohttp.ClientError, + }, + ConfigEntryState.SETUP_RETRY, + ), + ], +) +async def test_token_refresh_error( + aioclient_mock_args: dict[str, Any], + expected_config_entry_state: ConfigEntryState, + hass: HomeAssistant, + platforms: list[Platform], + integration_setup: Callable[[MagicMock], Awaitable[bool]], + config_entry: MockConfigEntry, + aioclient_mock: AiohttpClientMocker, + setup_credentials: None, + client: MagicMock, +) -> None: + """Test where token is expired and the refresh attempt fails.""" + + config_entry.data["token"]["access_token"] = FAKE_ACCESS_TOKEN + + aioclient_mock.post( + OAUTH2_TOKEN, + **aioclient_mock_args, + ) + + assert config_entry.state == ConfigEntryState.NOT_LOADED + with patch( + "homeassistant.components.home_connect.HomeConnectClient", return_value=client + ): + assert not await integration_setup(client) + await hass.async_block_till_done() + + assert config_entry.state == expected_config_entry_state + + @pytest.mark.parametrize( ("exception", "expected_state"), [ @@ -305,196 +206,48 @@ async def test_client_error( @pytest.mark.parametrize( - "service_call", - SERVICE_KV_CALL_PARAMS + SERVICE_COMMAND_CALL_PARAMS + SERVICE_PROGRAM_CALL_PARAMS, -) -async def test_key_value_services( - service_call: dict[str, Any], - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - config_entry: MockConfigEntry, - integration_setup: Callable[[MagicMock], Awaitable[bool]], - setup_credentials: None, - client: MagicMock, - appliance_ha_id: str, -) -> None: - """Create and test services.""" - assert config_entry.state == ConfigEntryState.NOT_LOADED - assert await integration_setup(client) - assert config_entry.state == ConfigEntryState.LOADED - - device_entry = device_registry.async_get_or_create( - config_entry_id=config_entry.entry_id, - identifiers={(DOMAIN, appliance_ha_id)}, - ) - - service_name = service_call["service"] - service_call["service_data"]["device_id"] = device_entry.id - await hass.services.async_call(**service_call) - await hass.async_block_till_done() - assert ( - getattr(client, SERVICE_APPLIANCE_METHOD_MAPPING[service_name]).call_count == 1 - ) - - -@pytest.mark.parametrize( - ("service_call", "issue_id"), + "raising_exception_method", [ - *zip( - DEPRECATED_SERVICE_KV_CALL_PARAMS + SERVICE_PROGRAM_CALL_PARAMS, - ["deprecated_set_program_and_option_actions"] - * ( - len(DEPRECATED_SERVICE_KV_CALL_PARAMS) - + len(SERVICE_PROGRAM_CALL_PARAMS) - ), - strict=True, - ), - *zip( - SERVICE_COMMAND_CALL_PARAMS, - ["deprecated_command_actions"] * len(SERVICE_COMMAND_CALL_PARAMS), - strict=True, - ), + "get_settings", + "get_status", + "get_all_programs", + "get_available_commands", + "get_available_program", ], ) -async def test_programs_and_options_actions_deprecation( - service_call: dict[str, Any], - issue_id: str, +async def test_client_rate_limit_error( + raising_exception_method: str, hass: HomeAssistant, - device_registry: dr.DeviceRegistry, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], 
setup_credentials: None, client: MagicMock, - appliance_ha_id: str, - issue_registry: ir.IssueRegistry, - hass_client: ClientSessionGenerator, ) -> None: - """Test deprecated service keys.""" + """Test client errors during setup integration.""" + retry_after = 42 + + original_mock = getattr(client, raising_exception_method) + mock = AsyncMock() + + async def side_effect(*args, **kwargs): + if mock.call_count <= 1: + raise TooManyRequestsError("error.key", retry_after=retry_after) + return await original_mock(*args, **kwargs) + + mock.side_effect = side_effect + setattr(client, raising_exception_method, mock) + assert config_entry.state == ConfigEntryState.NOT_LOADED - assert await integration_setup(client) + with patch( + "homeassistant.components.home_connect.coordinator.asyncio_sleep", + ) as asyncio_sleep_mock: + assert await integration_setup(client) assert config_entry.state == ConfigEntryState.LOADED - - device_entry = device_registry.async_get_or_create( - config_entry_id=config_entry.entry_id, - identifiers={(DOMAIN, appliance_ha_id)}, - ) - - service_call["service_data"]["device_id"] = device_entry.id - await hass.services.async_call(**service_call) - await hass.async_block_till_done() - - assert len(issue_registry.issues) == 1 - issue = issue_registry.async_get_issue(DOMAIN, issue_id) - assert issue - - _client = await hass_client() - resp = await _client.post( - "/api/repairs/issues/fix", - json={"handler": DOMAIN, "issue_id": issue.issue_id}, - ) - assert resp.status == HTTPStatus.OK - flow_id = (await resp.json())["flow_id"] - resp = await _client.post(f"/api/repairs/issues/fix/{flow_id}") - - assert not issue_registry.async_get_issue(DOMAIN, issue_id) - assert len(issue_registry.issues) == 0 - - await hass.services.async_call(**service_call) - await hass.async_block_till_done() - - assert len(issue_registry.issues) == 1 - assert issue_registry.async_get_issue(DOMAIN, issue_id) - - await hass.config_entries.async_unload(config_entry.entry_id) - await hass.async_block_till_done() - - # Assert the issue is no longer present - assert not issue_registry.async_get_issue(DOMAIN, issue_id) - assert len(issue_registry.issues) == 0 - - -@pytest.mark.parametrize( - ("service_call", "called_method"), - zip( - SERVICES_SET_PROGRAM_AND_OPTIONS, - [ - "set_selected_program", - "start_program", - "set_active_program_options", - "set_selected_program_options", - ], - strict=True, - ), -) -async def test_set_program_and_options( - service_call: dict[str, Any], - called_method: str, - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - config_entry: MockConfigEntry, - integration_setup: Callable[[MagicMock], Awaitable[bool]], - setup_credentials: None, - client: MagicMock, - appliance_ha_id: str, - snapshot: SnapshotAssertion, -) -> None: - """Test recognized options.""" - assert config_entry.state == ConfigEntryState.NOT_LOADED - assert await integration_setup(client) - assert config_entry.state == ConfigEntryState.LOADED - - device_entry = device_registry.async_get_or_create( - config_entry_id=config_entry.entry_id, - identifiers={(DOMAIN, appliance_ha_id)}, - ) - - service_call["service_data"]["device_id"] = device_entry.id - await hass.services.async_call(**service_call) - await hass.async_block_till_done() - method_mock: MagicMock = getattr(client, called_method) - assert method_mock.call_count == 1 - assert method_mock.call_args == snapshot - - -@pytest.mark.parametrize( - ("service_call", "error_regex"), - zip( - SERVICES_SET_PROGRAM_AND_OPTIONS, - [ - 
r"Error.*selecting.*program.*", - r"Error.*starting.*program.*", - r"Error.*setting.*options.*active.*program.*", - r"Error.*setting.*options.*selected.*program.*", - ], - strict=True, - ), -) -async def test_set_program_and_options_exceptions( - service_call: dict[str, Any], - error_regex: str, - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - config_entry: MockConfigEntry, - integration_setup: Callable[[MagicMock], Awaitable[bool]], - setup_credentials: None, - client_with_exception: MagicMock, - appliance_ha_id: str, -) -> None: - """Test recognized options.""" - assert config_entry.state == ConfigEntryState.NOT_LOADED - assert await integration_setup(client_with_exception) - assert config_entry.state == ConfigEntryState.LOADED - - device_entry = device_registry.async_get_or_create( - config_entry_id=config_entry.entry_id, - identifiers={(DOMAIN, appliance_ha_id)}, - ) - - service_call["service_data"]["device_id"] = device_entry.id - with pytest.raises(HomeAssistantError, match=error_regex): - await hass.services.async_call(**service_call) + assert mock.call_count >= 2 + asyncio_sleep_mock.assert_called_once_with(retry_after) +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) async def test_required_program_or_at_least_an_option( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -502,7 +255,7 @@ async def test_required_program_or_at_least_an_option( integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, client: MagicMock, - appliance_ha_id: str, + appliance: HomeAppliance, ) -> None: "Test that the set_program_and_options does raise an exception if no program nor options are set." @@ -512,7 +265,7 @@ async def test_required_program_or_at_least_an_option( device_entry = device_registry.async_get_or_create( config_entry_id=config_entry.entry_id, - identifiers={(DOMAIN, appliance_ha_id)}, + identifiers={(DOMAIN, appliance.ha_id)}, ) with pytest.raises( @@ -529,119 +282,13 @@ async def test_required_program_or_at_least_an_option( ) -@pytest.mark.parametrize( - "service_call", - SERVICE_KV_CALL_PARAMS + SERVICE_COMMAND_CALL_PARAMS + SERVICE_PROGRAM_CALL_PARAMS, -) -async def test_services_exception_device_id( - service_call: dict[str, Any], - hass: HomeAssistant, - config_entry: MockConfigEntry, - integration_setup: Callable[[MagicMock], Awaitable[bool]], - setup_credentials: None, - client_with_exception: MagicMock, - appliance_ha_id: str, - device_registry: dr.DeviceRegistry, -) -> None: - """Raise a HomeAssistantError when there is an API error.""" - assert config_entry.state == ConfigEntryState.NOT_LOADED - assert await integration_setup(client_with_exception) - assert config_entry.state == ConfigEntryState.LOADED - - device_entry = device_registry.async_get_or_create( - config_entry_id=config_entry.entry_id, - identifiers={(DOMAIN, appliance_ha_id)}, - ) - - service_call["service_data"]["device_id"] = device_entry.id - - with pytest.raises(HomeAssistantError): - await hass.services.async_call(**service_call) - - -async def test_services_appliance_not_found( - hass: HomeAssistant, - config_entry: MockConfigEntry, - integration_setup: Callable[[MagicMock], Awaitable[bool]], - setup_credentials: None, - client: MagicMock, - device_registry: dr.DeviceRegistry, -) -> None: - """Raise a ServiceValidationError when device id does not match.""" - assert config_entry.state == ConfigEntryState.NOT_LOADED - assert await integration_setup(client) - assert config_entry.state == ConfigEntryState.LOADED - - service_call = 
SERVICE_KV_CALL_PARAMS[0] - - service_call["service_data"]["device_id"] = "DOES_NOT_EXISTS" - - with pytest.raises(ServiceValidationError, match=r"Device entry.*not found"): - await hass.services.async_call(**service_call) - - unrelated_config_entry = MockConfigEntry( - domain="TEST", - ) - unrelated_config_entry.add_to_hass(hass) - device_entry = device_registry.async_get_or_create( - config_entry_id=unrelated_config_entry.entry_id, - identifiers={("RANDOM", "ABCD")}, - ) - service_call["service_data"]["device_id"] = device_entry.id - - with pytest.raises(ServiceValidationError, match=r"Config entry.*not found"): - await hass.services.async_call(**service_call) - - device_entry = device_registry.async_get_or_create( - config_entry_id=config_entry.entry_id, - identifiers={("RANDOM", "ABCD")}, - ) - service_call["service_data"]["device_id"] = device_entry.id - - with pytest.raises(ServiceValidationError, match=r"Appliance.*not found"): - await hass.services.async_call(**service_call) - - -@pytest.mark.parametrize( - "service_call", - SERVICE_KV_CALL_PARAMS + SERVICE_COMMAND_CALL_PARAMS + SERVICE_PROGRAM_CALL_PARAMS, -) -async def test_services_exception( - service_call: dict[str, Any], - hass: HomeAssistant, - config_entry: MockConfigEntry, - integration_setup: Callable[[MagicMock], Awaitable[bool]], - setup_credentials: None, - client_with_exception: MagicMock, - appliance_ha_id: str, - device_registry: dr.DeviceRegistry, -) -> None: - """Raise a ValueError when device id does not match.""" - assert config_entry.state == ConfigEntryState.NOT_LOADED - assert await integration_setup(client_with_exception) - assert config_entry.state == ConfigEntryState.LOADED - - device_entry = device_registry.async_get_or_create( - config_entry_id=config_entry.entry_id, - identifiers={(DOMAIN, appliance_ha_id)}, - ) - - service_call["service_data"]["device_id"] = device_entry.id - - service_name = service_call["service"] - with pytest.raises( - HomeAssistantError, - match=SERVICE_VALIDATION_ERROR_MAPPING[service_name], - ): - await hass.services.async_call(**service_call) - - +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) async def test_entity_migration( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, config_entry_v1_1: MockConfigEntry, - appliance_ha_id: str, + appliance: HomeAppliance, platforms: list[Platform], ) -> None: """Test entity migration.""" @@ -650,7 +297,7 @@ async def test_entity_migration( device_entry = device_registry.async_get_or_create( config_entry_id=config_entry_v1_1.entry_id, - identifiers={(DOMAIN, appliance_ha_id)}, + identifiers={(DOMAIN, appliance.ha_id)}, ) test_entities = [ @@ -690,7 +337,7 @@ async def test_entity_migration( entity_registry.async_get_or_create( domain, DOMAIN, - f"{appliance_ha_id}-{old_unique_id_suffix}", + f"{appliance.ha_id}-{old_unique_id_suffix}", device_id=device_entry.id, config_entry=config_entry_v1_1, ) @@ -701,7 +348,7 @@ async def test_entity_migration( for domain, _, expected_unique_id_suffix in test_entities: assert entity_registry.async_get_entity_id( - domain, DOMAIN, f"{appliance_ha_id}-{expected_unique_id_suffix}" + domain, DOMAIN, f"{appliance.ha_id}-{expected_unique_id_suffix}" ) assert config_entry_v1_1.minor_version == 2 diff --git a/tests/components/home_connect/test_light.py b/tests/components/home_connect/test_light.py index 6021c99bb5e..50a1a1e374a 100644 --- a/tests/components/home_connect/test_light.py +++ b/tests/components/home_connect/test_light.py @@ -12,6 +12,7 @@ 
from aiohomeconnect.model import ( EventMessage, EventType, GetSetting, + HomeAppliance, SettingKey, ) from aiohomeconnect.model.error import HomeConnectApiError, HomeConnectError @@ -21,9 +22,15 @@ from homeassistant.components.home_connect.const import ( BSH_AMBIENT_LIGHT_COLOR_CUSTOM_COLOR, DOMAIN, ) -from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN +from homeassistant.components.light import ( + ATTR_BRIGHTNESS, + ATTR_HS_COLOR, + ATTR_RGB_COLOR, + DOMAIN as LIGHT_DOMAIN, +) from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ( + ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, @@ -58,9 +65,9 @@ async def test_light( assert config_entry.state == ConfigEntryState.LOADED -@pytest.mark.parametrize("appliance_ha_id", ["Hood"], indirect=True) +@pytest.mark.parametrize("appliance", ["Hood"], indirect=True) async def test_paired_depaired_devices_flow( - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -74,7 +81,7 @@ async def test_paired_depaired_devices_flow( assert await integration_setup(client) assert config_entry.state == ConfigEntryState.LOADED - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) assert entity_entries @@ -82,7 +89,7 @@ async def test_paired_depaired_devices_flow( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DEPAIRED, data=ArrayOfEvents([]), ) @@ -90,7 +97,7 @@ async def test_paired_depaired_devices_flow( ) await hass.async_block_till_done() - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert not device for entity_entry in entity_entries: assert not entity_registry.async_get(entity_entry.entity_id) @@ -99,7 +106,7 @@ async def test_paired_depaired_devices_flow( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.PAIRED, data=ArrayOfEvents([]), ) @@ -107,14 +114,14 @@ async def test_paired_depaired_devices_flow( ) await hass.async_block_till_done() - assert device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + assert device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) for entity_entry in entity_entries: assert entity_registry.async_get(entity_entry.entity_id) -@pytest.mark.parametrize("appliance_ha_id", ["Hood"], indirect=True) +@pytest.mark.parametrize("appliance", ["Hood"], indirect=True) async def test_connected_devices( - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -132,14 +139,14 @@ async def test_connected_devices( get_available_programs_mock = client.get_available_programs async def get_settings_side_effect(ha_id: str): - if ha_id == appliance_ha_id: + if ha_id == appliance.ha_id: raise HomeConnectApiError( "SDK.Error.HomeAppliance.Connection.Initialization.Failed" ) return await get_settings_original_mock.side_effect(ha_id) async def get_available_programs_side_effect(ha_id: str): - if ha_id == appliance_ha_id: + if ha_id == appliance.ha_id: raise HomeConnectApiError( 
"SDK.Error.HomeAppliance.Connection.Initialization.Failed" ) @@ -155,14 +162,14 @@ async def test_connected_devices( client.get_settings = get_settings_original_mock client.get_available_programs = get_available_programs_mock - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, data=ArrayOfEvents([]), ) @@ -170,20 +177,20 @@ async def test_connected_devices( ) await hass.async_block_till_done() - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device new_entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) assert len(new_entity_entries) > len(entity_entries) -@pytest.mark.parametrize("appliance_ha_id", ["Hood"], indirect=True) -async def test_light_availabilty( +@pytest.mark.parametrize("appliance", ["Hood"], indirect=True) +async def test_light_availability( hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, client: MagicMock, - appliance_ha_id: str, + appliance: HomeAppliance, ) -> None: """Test if light entities availability are based on the appliance connection state.""" entity_ids = [ @@ -201,7 +208,7 @@ async def test_light_availabilty( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DISCONNECTED, ArrayOfEvents([]), ) @@ -215,7 +222,7 @@ async def test_light_availabilty( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, ArrayOfEvents([]), ) @@ -236,7 +243,7 @@ async def test_light_availabilty( "service", "exprected_attributes", "state", - "appliance_ha_id", + "appliance", ), [ ( @@ -256,7 +263,7 @@ async def test_light_availabilty( SettingKey.COOKING_COMMON_LIGHTING_BRIGHTNESS: 80, }, SERVICE_TURN_ON, - {"brightness": 199}, + {ATTR_BRIGHTNESS: 199}, STATE_ON, "Hood", ), @@ -277,7 +284,7 @@ async def test_light_availabilty( SettingKey.BSH_COMMON_AMBIENT_LIGHT_BRIGHTNESS: 80, }, SERVICE_TURN_ON, - {"brightness": 199}, + {ATTR_BRIGHTNESS: 199}, STATE_ON, "Hood", ), @@ -310,7 +317,7 @@ async def test_light_availabilty( }, SERVICE_TURN_ON, { - "rgb_color": (255, 255, 0), + ATTR_RGB_COLOR: (255, 255, 0), }, STATE_ON, "Hood", @@ -324,8 +331,8 @@ async def test_light_availabilty( }, SERVICE_TURN_ON, { - "hs_color": (255.484, 15.196), - "brightness": 199, + ATTR_HS_COLOR: (255.484, 15.196), + ATTR_BRIGHTNESS: 199, }, STATE_ON, "Hood", @@ -341,7 +348,7 @@ async def test_light_availabilty( "FridgeFreezer", ), ], - indirect=["appliance_ha_id"], + indirect=["appliance"], ) async def test_light_functionality( entity_id: str, @@ -349,7 +356,7 @@ async def test_light_functionality( service: str, exprected_attributes: dict[str, Any], state: str, - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -362,7 +369,7 @@ async def test_light_functionality( assert config_entry.state == ConfigEntryState.LOADED service_data = exprected_attributes.copy() - service_data["entity_id"] = entity_id + service_data[ATTR_ENTITY_ID] = entity_id await hass.services.async_call( 
LIGHT_DOMAIN, service, @@ -371,7 +378,7 @@ async def test_light_functionality( await hass.async_block_till_done() client.set_setting.assert_has_calls( [ - call(appliance_ha_id, setting_key=setting_key, value=value) + call(appliance.ha_id, setting_key=setting_key, value=value) for setting_key, value in set_settings_args.items() ] ) @@ -386,7 +393,7 @@ async def test_light_functionality( ( "entity_id", "events", - "appliance_ha_id", + "appliance", ), [ ( @@ -397,12 +404,12 @@ async def test_light_functionality( "Hood", ), ], - indirect=["appliance_ha_id"], + indirect=["appliance"], ) async def test_light_color_different_than_custom( entity_id: str, events: dict[EventKey, Any], - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -417,21 +424,21 @@ async def test_light_color_different_than_custom( LIGHT_DOMAIN, SERVICE_TURN_ON, { - "rgb_color": (255, 255, 0), - "entity_id": entity_id, + ATTR_RGB_COLOR: (255, 255, 0), + ATTR_ENTITY_ID: entity_id, }, ) await hass.async_block_till_done() entity_state = hass.states.get(entity_id) assert entity_state is not None assert entity_state.state == STATE_ON - assert entity_state.attributes["rgb_color"] is not None - assert entity_state.attributes["hs_color"] is not None + assert entity_state.attributes[ATTR_RGB_COLOR] is not None + assert entity_state.attributes[ATTR_HS_COLOR] is not None await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.NOTIFY, ArrayOfEvents( [ @@ -454,8 +461,8 @@ async def test_light_color_different_than_custom( entity_state = hass.states.get(entity_id) assert entity_state is not None assert entity_state.state == STATE_ON - assert entity_state.attributes["rgb_color"] is None - assert entity_state.attributes["hs_color"] is None + assert entity_state.attributes[ATTR_RGB_COLOR] is None + assert entity_state.attributes[ATTR_HS_COLOR] is None @pytest.mark.parametrize( @@ -485,7 +492,7 @@ async def test_light_color_different_than_custom( SettingKey.COOKING_COMMON_LIGHTING_BRIGHTNESS: 70, }, SERVICE_TURN_ON, - {"brightness": 200}, + {ATTR_BRIGHTNESS: 200}, [HomeConnectError, HomeConnectError], r"Error.*turn.*on.*", ), @@ -517,7 +524,7 @@ async def test_light_color_different_than_custom( SettingKey.BSH_COMMON_AMBIENT_LIGHT_BRIGHTNESS: 70, }, SERVICE_TURN_ON, - {"brightness": 200}, + {ATTR_BRIGHTNESS: 200}, [HomeConnectError, None, HomeConnectError], r"Error.*set.*brightness.*", ), @@ -530,7 +537,7 @@ async def test_light_color_different_than_custom( SettingKey.BSH_COMMON_AMBIENT_LIGHT_CUSTOM_COLOR: "#ffff00", }, SERVICE_TURN_ON, - {"rgb_color": (255, 255, 0)}, + {ATTR_RGB_COLOR: (255, 255, 0)}, [HomeConnectError, None, HomeConnectError], r"Error.*select.*custom color.*", ), @@ -543,7 +550,7 @@ async def test_light_color_different_than_custom( SettingKey.BSH_COMMON_AMBIENT_LIGHT_CUSTOM_COLOR: "#ffff00", }, SERVICE_TURN_ON, - {"rgb_color": (255, 255, 0)}, + {ATTR_RGB_COLOR: (255, 255, 0)}, [HomeConnectError, None, None, HomeConnectError], r"Error.*set.*color.*", ), @@ -556,8 +563,8 @@ async def test_light_color_different_than_custom( }, SERVICE_TURN_ON, { - "hs_color": (255.484, 15.196), - "brightness": 199, + ATTR_HS_COLOR: (255.484, 15.196), + ATTR_BRIGHTNESS: 199, }, [HomeConnectError, None, None, HomeConnectError], r"Error.*set.*color.*", @@ -600,7 +607,7 @@ async def test_light_exception_handling( with pytest.raises(HomeConnectError): await client_with_exception.set_setting() - 
service_data["entity_id"] = entity_id + service_data[ATTR_ENTITY_ID] = entity_id with pytest.raises(HomeAssistantError, match=exception_match): await hass.services.async_call( LIGHT_DOMAIN, service, service_data, blocking=True diff --git a/tests/components/home_connect/test_number.py b/tests/components/home_connect/test_number.py index 214dcb6137c..1de384303ce 100644 --- a/tests/components/home_connect/test_number.py +++ b/tests/components/home_connect/test_number.py @@ -2,7 +2,7 @@ from collections.abc import Awaitable, Callable import random -from unittest.mock import AsyncMock, MagicMock +from unittest.mock import AsyncMock, MagicMock, patch from aiohomeconnect.model import ( ArrayOfEvents, @@ -12,6 +12,7 @@ from aiohomeconnect.model import ( EventMessage, EventType, GetSetting, + HomeAppliance, OptionKey, ProgramDefinition, ProgramKey, @@ -22,6 +23,7 @@ from aiohomeconnect.model.error import ( HomeConnectApiError, HomeConnectError, SelectedProgramNotSetError, + TooManyRequestsError, ) from aiohomeconnect.model.program import ( ProgramDefinitionConstraints, @@ -47,7 +49,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, async_fire_time_changed @pytest.fixture @@ -68,8 +70,9 @@ async def test_number( assert config_entry.state is ConfigEntryState.LOADED +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) async def test_paired_depaired_devices_flow( - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -94,7 +97,7 @@ async def test_paired_depaired_devices_flow( assert await integration_setup(client) assert config_entry.state == ConfigEntryState.LOADED - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) assert entity_entries @@ -102,7 +105,7 @@ async def test_paired_depaired_devices_flow( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DEPAIRED, data=ArrayOfEvents([]), ) @@ -110,7 +113,7 @@ async def test_paired_depaired_devices_flow( ) await hass.async_block_till_done() - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert not device for entity_entry in entity_entries: assert not entity_registry.async_get(entity_entry.entity_id) @@ -119,7 +122,7 @@ async def test_paired_depaired_devices_flow( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.PAIRED, data=ArrayOfEvents([]), ) @@ -127,14 +130,14 @@ async def test_paired_depaired_devices_flow( ) await hass.async_block_till_done() - assert device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + assert device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) for entity_entry in entity_entries: assert entity_registry.async_get(entity_entry.entity_id) -@pytest.mark.parametrize("appliance_ha_id", ["FridgeFreezer"], indirect=True) +@pytest.mark.parametrize("appliance", ["FridgeFreezer"], indirect=True) async def test_connected_devices( - appliance_ha_id: str, + 
appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -151,7 +154,7 @@ async def test_connected_devices( get_settings_original_mock = client.get_settings def get_settings_side_effect(ha_id: str): - if ha_id == appliance_ha_id: + if ha_id == appliance.ha_id: raise HomeConnectApiError( "SDK.Error.HomeAppliance.Connection.Initialization.Failed" ) @@ -163,14 +166,14 @@ async def test_connected_devices( assert config_entry.state == ConfigEntryState.LOADED client.get_settings = get_settings_original_mock - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, data=ArrayOfEvents([]), ) @@ -178,20 +181,20 @@ async def test_connected_devices( ) await hass.async_block_till_done() - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device new_entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) assert len(new_entity_entries) > len(entity_entries) -@pytest.mark.parametrize("appliance_ha_id", ["FridgeFreezer"], indirect=True) -async def test_number_entity_availabilty( +@pytest.mark.parametrize("appliance", ["FridgeFreezer"], indirect=True) +async def test_number_entity_availability( hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, client: MagicMock, - appliance_ha_id: str, + appliance: HomeAppliance, ) -> None: """Test if number entities availability are based on the appliance connection state.""" entity_ids = [ @@ -214,7 +217,7 @@ async def test_number_entity_availabilty( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DISCONNECTED, ArrayOfEvents([]), ) @@ -228,7 +231,7 @@ async def test_number_entity_availabilty( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, ArrayOfEvents([]), ) @@ -242,7 +245,7 @@ async def test_number_entity_availabilty( assert state.state != STATE_UNAVAILABLE -@pytest.mark.parametrize("appliance_ha_id", ["FridgeFreezer"], indirect=True) +@pytest.mark.parametrize("appliance", ["FridgeFreezer"], indirect=True) @pytest.mark.parametrize( ( "entity_id", @@ -278,7 +281,7 @@ async def test_number_entity_availabilty( ], ) async def test_number_entity_functionality( - appliance_ha_id: str, + appliance: HomeAppliance, entity_id: str, setting_key: SettingKey, type: str, @@ -335,11 +338,103 @@ async def test_number_entity_functionality( ) await hass.async_block_till_done() client.set_setting.assert_awaited_once_with( - appliance_ha_id, setting_key=setting_key, value=value + appliance.ha_id, setting_key=setting_key, value=value ) assert hass.states.is_state(entity_id, str(float(value))) +@pytest.mark.parametrize("appliance", ["FridgeFreezer"], indirect=True) +@pytest.mark.parametrize("retry_after", [0, None]) +@pytest.mark.parametrize( + ( + "entity_id", + "setting_key", + "type", + "min_value", + "max_value", + "step_size", + "unit_of_measurement", + ), + [ + ( + f"{NUMBER_DOMAIN.lower()}.fridgefreezer_refrigerator_temperature", + 
SettingKey.REFRIGERATION_FRIDGE_FREEZER_SETPOINT_TEMPERATURE_REFRIGERATOR, + "Double", + 7, + 15, + 5, + "°C", + ), + ], +) +@patch("homeassistant.components.home_connect.entity.API_DEFAULT_RETRY_AFTER", new=0) +async def test_fetch_constraints_after_rate_limit_error( + retry_after: int | None, + appliance: HomeAppliance, + entity_id: str, + setting_key: SettingKey, + type: str, + min_value: int, + max_value: int, + step_size: int, + unit_of_measurement: str, + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[MagicMock], Awaitable[bool]], + setup_credentials: None, + client: MagicMock, +) -> None: + """Test that, if a API rate limit error is raised, the constraints are fetched later.""" + + def get_settings_side_effect(ha_id: str): + if ha_id != appliance.ha_id: + return ArrayOfSettings([]) + return ArrayOfSettings( + [ + GetSetting( + key=setting_key, + raw_key=setting_key.value, + value=random.randint(min_value, max_value), + ) + ] + ) + + client.get_settings = AsyncMock(side_effect=get_settings_side_effect) + client.get_setting = AsyncMock( + side_effect=[ + TooManyRequestsError("error.key", retry_after=retry_after), + GetSetting( + key=setting_key, + raw_key=setting_key.value, + value=random.randint(min_value, max_value), + unit=unit_of_measurement, + type=type, + constraints=SettingConstraints( + min=min_value, + max=max_value, + step_size=step_size, + ), + ), + ] + ) + + assert config_entry.state is ConfigEntryState.NOT_LOADED + assert await integration_setup(client) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + assert client.get_setting.call_count == 2 + + entity_state = hass.states.get(entity_id) + assert entity_state + attributes = entity_state.attributes + assert attributes["min"] == min_value + assert attributes["max"] == max_value + assert attributes["step"] == step_size + assert attributes["unit_of_measurement"] == unit_of_measurement + + @pytest.mark.parametrize( ("entity_id", "setting_key", "mock_attr"), [ @@ -418,7 +513,7 @@ async def test_number_entity_error( ], ) @pytest.mark.parametrize( - ("appliance_ha_id", "entity_id", "option_key", "min", "max", "step_size", "unit"), + ("appliance", "entity_id", "option_key", "min", "max", "step_size", "unit"), [ ( "Oven", @@ -430,12 +525,12 @@ async def test_number_entity_error( "°C", ), ], - indirect=["appliance_ha_id"], + indirect=["appliance"], ) async def test_options_functionality( entity_id: str, option_key: OptionKey, - appliance_ha_id: str, + appliance: HomeAppliance, min: int, max: int, step_size: int, @@ -522,7 +617,7 @@ async def test_options_functionality( await hass.async_block_till_done() assert called_mock.called - assert called_mock.call_args.args == (appliance_ha_id,) + assert called_mock.call_args.args == (appliance.ha_id,) assert called_mock.call_args.kwargs == { "option_key": option_key, "value": 80, diff --git a/tests/components/home_connect/test_select.py b/tests/components/home_connect/test_select.py index 22ece365e6b..f6009640f72 100644 --- a/tests/components/home_connect/test_select.py +++ b/tests/components/home_connect/test_select.py @@ -12,6 +12,7 @@ from aiohomeconnect.model import ( EventMessage, EventType, GetSetting, + HomeAppliance, OptionKey, ProgramDefinition, ProgramKey, @@ -21,6 +22,7 @@ from aiohomeconnect.model.error import ( ActiveProgramNotSetError, HomeConnectError, SelectedProgramNotSetError, + TooManyRequestsError, ) from aiohomeconnect.model.program import ( 
EnumerateProgram, @@ -50,7 +52,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, async_fire_time_changed @pytest.fixture @@ -71,8 +73,9 @@ async def test_select( assert config_entry.state is ConfigEntryState.LOADED +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) async def test_paired_depaired_devices_flow( - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -97,7 +100,7 @@ async def test_paired_depaired_devices_flow( assert await integration_setup(client) assert config_entry.state == ConfigEntryState.LOADED - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) assert entity_entries @@ -105,7 +108,7 @@ async def test_paired_depaired_devices_flow( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DEPAIRED, data=ArrayOfEvents([]), ) @@ -113,7 +116,7 @@ async def test_paired_depaired_devices_flow( ) await hass.async_block_till_done() - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert not device for entity_entry in entity_entries: assert not entity_registry.async_get(entity_entry.entity_id) @@ -122,7 +125,7 @@ async def test_paired_depaired_devices_flow( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.PAIRED, data=ArrayOfEvents([]), ) @@ -130,13 +133,14 @@ async def test_paired_depaired_devices_flow( ) await hass.async_block_till_done() - assert device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + assert device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) for entity_entry in entity_entries: assert entity_registry.async_get(entity_entry.entity_id) +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) async def test_connected_devices( - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -155,13 +159,13 @@ async def test_connected_devices( assert await integration_setup(client) assert config_entry.state == ConfigEntryState.LOADED - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, data=ArrayOfEvents([]), ) @@ -169,19 +173,20 @@ async def test_connected_devices( ) await hass.async_block_till_done() - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) assert entity_entries -async def test_select_entity_availabilty( +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) +async def test_select_entity_availability( hass: HomeAssistant, 
config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, client: MagicMock, - appliance_ha_id: str, + appliance: HomeAppliance, ) -> None: """Test if select entities availability are based on the appliance connection state.""" entity_ids = [ @@ -199,7 +204,7 @@ async def test_select_entity_availabilty( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DISCONNECTED, ArrayOfEvents([]), ) @@ -213,7 +218,7 @@ async def test_select_entity_availabilty( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, ArrayOfEvents([]), ) @@ -289,7 +294,7 @@ async def test_filter_programs( @pytest.mark.parametrize( ( - "appliance_ha_id", + "appliance", "entity_id", "expected_initial_state", "mock_method", @@ -317,10 +322,10 @@ async def test_filter_programs( EventKey.BSH_COMMON_ROOT_ACTIVE_PROGRAM, ), ], - indirect=["appliance_ha_id"], + indirect=["appliance"], ) async def test_select_program_functionality( - appliance_ha_id: str, + appliance: HomeAppliance, entity_id: str, expected_initial_state: str, mock_method: str, @@ -346,14 +351,14 @@ async def test_select_program_functionality( ) await hass.async_block_till_done() getattr(client, mock_method).assert_awaited_once_with( - appliance_ha_id, program_key=program_key + appliance.ha_id, program_key=program_key ) assert hass.states.is_state(entity_id, program_to_set) await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.NOTIFY, ArrayOfEvents( [ @@ -432,13 +437,13 @@ async def test_select_exception_handling( await hass.services.async_call( SELECT_DOMAIN, SERVICE_SELECT_OPTION, - {"entity_id": entity_id, "option": program_to_set}, + {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: program_to_set}, blocking=True, ) assert getattr(client_with_exception, mock_attr).call_count == 2 -@pytest.mark.parametrize("appliance_ha_id", ["Hood"], indirect=True) +@pytest.mark.parametrize("appliance", ["Hood"], indirect=True) @pytest.mark.parametrize( ( "entity_id", @@ -472,7 +477,7 @@ async def test_select_exception_handling( ], ) async def test_select_functionality( - appliance_ha_id: str, + appliance: HomeAppliance, entity_id: str, setting_key: SettingKey, expected_options: set[str], @@ -496,12 +501,12 @@ async def test_select_functionality( await hass.services.async_call( SELECT_DOMAIN, SERVICE_SELECT_OPTION, - {ATTR_ENTITY_ID: entity_id, "option": value_to_set}, + {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: value_to_set}, ) await hass.async_block_till_done() client.set_setting.assert_called_once() - assert client.set_setting.call_args.args == (appliance_ha_id,) + assert client.set_setting.call_args.args == (appliance.ha_id,) assert client.set_setting.call_args.kwargs == { "setting_key": setting_key, "value": expected_value_call_arg, @@ -509,7 +514,7 @@ async def test_select_functionality( assert hass.states.is_state(entity_id, value_to_set) -@pytest.mark.parametrize("appliance_ha_id", ["Hood"], indirect=True) +@pytest.mark.parametrize("appliance", ["Hood"], indirect=True) @pytest.mark.parametrize( ( "entity_id", @@ -521,13 +526,22 @@ async def test_select_functionality( ( "select.hood_ambient_light_color", SettingKey.BSH_COMMON_AMBIENT_LIGHT_COLOR, - [f"BSH.Common.EnumType.AmbientLightColor.Color{i}" for i in range(50)], + [f"BSH.Common.EnumType.AmbientLightColor.Color{i}" for i in range(1, 50)], {str(i) for i in range(1, 50)}, ), + ( + "select.hood_ambient_light_color", + 
SettingKey.BSH_COMMON_AMBIENT_LIGHT_COLOR, + [ + "A.Non.Documented.Option", + "BSH.Common.EnumType.AmbientLightColor.Color42", + ], + {"42"}, + ), ], ) async def test_fetch_allowed_values( - appliance_ha_id: str, + appliance: HomeAppliance, entity_id: str, test_setting_key: SettingKey, allowed_values: list[str | None], @@ -544,7 +558,7 @@ async def test_fetch_allowed_values( async def get_setting_side_effect( ha_id: str, setting_key: SettingKey ) -> GetSetting: - if ha_id != appliance_ha_id or setting_key != test_setting_key: + if ha_id != appliance.ha_id or setting_key != test_setting_key: return await original_get_setting_side_effect(ha_id, setting_key) return GetSetting( key=test_setting_key, @@ -566,6 +580,139 @@ async def test_fetch_allowed_values( assert set(entity_state.attributes[ATTR_OPTIONS]) == expected_options +@pytest.mark.parametrize("appliance", ["Hood"], indirect=True) +@pytest.mark.parametrize( + ( + "entity_id", + "setting_key", + "allowed_values", + "expected_options", + ), + [ + ( + "select.hood_ambient_light_color", + SettingKey.BSH_COMMON_AMBIENT_LIGHT_COLOR, + [f"BSH.Common.EnumType.AmbientLightColor.Color{i}" for i in range(50)], + {str(i) for i in range(1, 50)}, + ), + ], +) +async def test_fetch_allowed_values_after_rate_limit_error( + appliance: HomeAppliance, + entity_id: str, + setting_key: SettingKey, + allowed_values: list[str | None], + expected_options: set[str], + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[MagicMock], Awaitable[bool]], + setup_credentials: None, + client: MagicMock, +) -> None: + """Test fetch allowed values.""" + + def get_settings_side_effect(ha_id: str): + if ha_id != appliance.ha_id: + return ArrayOfSettings([]) + return ArrayOfSettings( + [ + GetSetting( + key=setting_key, + raw_key=setting_key.value, + value="", # Not important + ) + ] + ) + + client.get_settings = AsyncMock(side_effect=get_settings_side_effect) + client.get_setting = AsyncMock( + side_effect=[ + TooManyRequestsError("error.key", retry_after=0), + GetSetting( + key=setting_key, + raw_key=setting_key.value, + value="", # Not important + constraints=SettingConstraints( + allowed_values=allowed_values, + ), + ), + ] + ) + + assert config_entry.state is ConfigEntryState.NOT_LOADED + assert await integration_setup(client) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + assert client.get_setting.call_count == 2 + + entity_state = hass.states.get(entity_id) + assert entity_state + assert set(entity_state.attributes[ATTR_OPTIONS]) == expected_options + + +@pytest.mark.parametrize("appliance", ["Hood"], indirect=True) +@pytest.mark.parametrize( + ( + "entity_id", + "setting_key", + "exception", + "expected_options", + ), + [ + ( + "select.hood_ambient_light_color", + SettingKey.BSH_COMMON_AMBIENT_LIGHT_COLOR, + HomeConnectError(), + { + "b_s_h_common_enum_type_ambient_light_color_custom_color", + *{str(i) for i in range(1, 100)}, + }, + ), + ], +) +async def test_default_values_after_fetch_allowed_values_error( + appliance: HomeAppliance, + entity_id: str, + setting_key: SettingKey, + exception: Exception, + expected_options: set[str], + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[MagicMock], Awaitable[bool]], + setup_credentials: None, + client: MagicMock, +) -> None: + """Test fetch allowed values.""" + + def get_settings_side_effect(ha_id: str): + if ha_id != appliance.ha_id: + return ArrayOfSettings([]) + return 
ArrayOfSettings( + [ + GetSetting( + key=setting_key, + raw_key=setting_key.value, + value="", # Not important + ) + ] + ) + + client.get_settings = AsyncMock(side_effect=get_settings_side_effect) + client.get_setting = AsyncMock(side_effect=exception) + + assert config_entry.state is ConfigEntryState.NOT_LOADED + assert await integration_setup(client) + assert config_entry.state is ConfigEntryState.LOADED + + assert client.get_setting.call_count == 1 + + entity_state = hass.states.get(entity_id) + assert entity_state + assert set(entity_state.attributes[ATTR_OPTIONS]) == expected_options + + @pytest.mark.parametrize( ("entity_id", "setting_key", "allowed_value", "value_to_set", "mock_attr"), [ @@ -615,12 +762,13 @@ async def test_select_entity_error( await hass.services.async_call( SELECT_DOMAIN, SERVICE_SELECT_OPTION, - {ATTR_ENTITY_ID: entity_id, "option": value_to_set}, + {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: value_to_set}, blocking=True, ) assert getattr(client_with_exception, mock_attr).call_count == 2 +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) @pytest.mark.parametrize( ( "set_active_program_options_side_effect", @@ -679,6 +827,17 @@ async def test_select_entity_error( "laundry_care_washer_enum_type_temperature_ul_extra_hot", }, ), + ( + "select.washer_temperature", + OptionKey.LAUNDRY_CARE_WASHER_TEMPERATURE, + [ + "A.Non.Documented.Option", + "LaundryCare.Washer.EnumType.Temperature.UlWarm", + ], + { + "laundry_care_washer_enum_type_temperature_ul_warm", + }, + ), ], ) async def test_options_functionality( @@ -686,7 +845,7 @@ async def test_options_functionality( option_key: OptionKey, allowed_values: list[str | None] | None, expected_options: set[str], - appliance_ha_id: str, + appliance: HomeAppliance, set_active_program_options_side_effect: ActiveProgramNotSetError | None, set_selected_program_options_side_effect: SelectedProgramNotSetError | None, called_mock_method: str, @@ -740,7 +899,7 @@ async def test_options_functionality( await hass.async_block_till_done() assert called_mock.called - assert called_mock.call_args.args == (appliance_ha_id,) + assert called_mock.call_args.args == (appliance.ha_id,) assert called_mock.call_args.kwargs == { "option_key": option_key, "value": "LaundryCare.Washer.EnumType.Temperature.UlWarm", diff --git a/tests/components/home_connect/test_sensor.py b/tests/components/home_connect/test_sensor.py index 04f5e056aa5..f30723af7fa 100644 --- a/tests/components/home_connect/test_sensor.py +++ b/tests/components/home_connect/test_sensor.py @@ -10,10 +10,11 @@ from aiohomeconnect.model import ( EventKey, EventMessage, EventType, + HomeAppliance, Status, StatusKey, ) -from aiohomeconnect.model.error import HomeConnectApiError +from aiohomeconnect.model.error import HomeConnectApiError, TooManyRequestsError from freezegun.api import FrozenDateTimeFactory import pytest @@ -26,12 +27,13 @@ from homeassistant.components.home_connect.const import ( BSH_EVENT_PRESENT_STATE_PRESENT, DOMAIN, ) +from homeassistant.components.home_connect.coordinator import HomeConnectError from homeassistant.config_entries import ConfigEntryState from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, async_fire_time_changed TEST_HC_APP = "Dishwasher" @@ -98,8 +100,9 @@ async def test_sensors( assert config_entry.state == 
ConfigEntryState.LOADED +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) async def test_paired_depaired_devices_flow( - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -113,7 +116,7 @@ async def test_paired_depaired_devices_flow( assert await integration_setup(client) assert config_entry.state == ConfigEntryState.LOADED - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) assert entity_entries @@ -121,7 +124,7 @@ async def test_paired_depaired_devices_flow( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DEPAIRED, data=ArrayOfEvents([]), ) @@ -129,7 +132,7 @@ async def test_paired_depaired_devices_flow( ) await hass.async_block_till_done() - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert not device for entity_entry in entity_entries: assert not entity_registry.async_get(entity_entry.entity_id) @@ -138,7 +141,7 @@ async def test_paired_depaired_devices_flow( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.PAIRED, data=ArrayOfEvents([]), ) @@ -146,13 +149,14 @@ async def test_paired_depaired_devices_flow( ) await hass.async_block_till_done() - assert device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + assert device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) for entity_entry in entity_entries: assert entity_registry.async_get(entity_entry.entity_id) +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) async def test_connected_devices( - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -169,7 +173,7 @@ async def test_connected_devices( get_status_original_mock = client.get_status def get_status_side_effect(ha_id: str): - if ha_id == appliance_ha_id: + if ha_id == appliance.ha_id: raise HomeConnectApiError( "SDK.Error.HomeAppliance.Connection.Initialization.Failed" ) @@ -181,14 +185,14 @@ async def test_connected_devices( assert config_entry.state == ConfigEntryState.LOADED client.get_status = get_status_original_mock - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, data=ArrayOfEvents([]), ) @@ -196,20 +200,20 @@ async def test_connected_devices( ) await hass.async_block_till_done() - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device new_entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) assert len(new_entity_entries) > len(entity_entries) -@pytest.mark.parametrize("appliance_ha_id", [TEST_HC_APP], indirect=True) -async def test_sensor_entity_availabilty( +@pytest.mark.parametrize("appliance", [TEST_HC_APP], 
indirect=True) +async def test_sensor_entity_availability( hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, client: MagicMock, - appliance_ha_id: str, + appliance: HomeAppliance, ) -> None: """Test if sensor entities availability are based on the appliance connection state.""" entity_ids = [ @@ -228,7 +232,7 @@ async def test_sensor_entity_availabilty( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DISCONNECTED, ArrayOfEvents([]), ) @@ -242,7 +246,7 @@ async def test_sensor_entity_availabilty( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, ArrayOfEvents([]), ) @@ -256,7 +260,7 @@ async def test_sensor_entity_availabilty( assert state.state != STATE_UNAVAILABLE -# Appliance_ha_id program sequence with a delayed start. +# Appliance program sequence with a delayed start. PROGRAM_SEQUENCE_EVENTS = ( EVENT_PROG_DELAYED_START, EVENT_PROG_RUN, @@ -291,7 +295,7 @@ ENTITY_ID_STATES = { } -@pytest.mark.parametrize("appliance_ha_id", [TEST_HC_APP], indirect=True) +@pytest.mark.parametrize("appliance", [TEST_HC_APP], indirect=True) @pytest.mark.parametrize( ("states", "event_run"), list( @@ -304,7 +308,7 @@ ENTITY_ID_STATES = { ) async def test_program_sensors( client: MagicMock, - appliance_ha_id: str, + appliance: HomeAppliance, states: tuple, event_run: dict[EventType, dict[EventKey, str | int]], freezer: FrozenDateTimeFactory, @@ -334,7 +338,7 @@ async def test_program_sensors( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, event_type, ArrayOfEvents( [ @@ -358,7 +362,7 @@ async def test_program_sensors( assert hass.states.is_state(entity_id, state) -@pytest.mark.parametrize("appliance_ha_id", [TEST_HC_APP], indirect=True) +@pytest.mark.parametrize("appliance", [TEST_HC_APP], indirect=True) @pytest.mark.parametrize( ("initial_operation_state", "initial_state", "event_order", "entity_states"), [ @@ -381,7 +385,7 @@ async def test_program_sensor_edge_case( initial_state: str, event_order: tuple[EventType, EventType], entity_states: tuple[str, str], - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -412,7 +416,7 @@ async def test_program_sensor_edge_case( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, event_type, ArrayOfEvents( [ @@ -451,9 +455,9 @@ ENTITY_ID_EDGE_CASE_STATES = [ ] -@pytest.mark.parametrize("appliance_ha_id", [TEST_HC_APP], indirect=True) +@pytest.mark.parametrize("appliance", [TEST_HC_APP], indirect=True) async def test_remaining_prog_time_edge_cases( - appliance_ha_id: str, + appliance: HomeAppliance, freezer: FrozenDateTimeFactory, hass: HomeAssistant, config_entry: MockConfigEntry, @@ -477,7 +481,7 @@ async def test_remaining_prog_time_edge_cases( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, event_type, ArrayOfEvents( [ @@ -508,7 +512,7 @@ async def test_remaining_prog_time_edge_cases( "event_type", "event_value_update", "expected", - "appliance_ha_id", + "appliance", ), [ ( @@ -600,14 +604,14 @@ async def test_remaining_prog_time_edge_cases( "CoffeeMaker", ), ], - indirect=["appliance_ha_id"], + indirect=["appliance"], ) async def test_sensors_states( entity_id: str, event_key: EventKey, event_type: EventType, event_value_update: str, - appliance_ha_id: str, + appliance: HomeAppliance, 
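# --- Illustrative aside (not part of the original test files) ------------------
# The hunks in this section switch from the string-valued `appliance_ha_id`
# fixture to an `appliance` fixture yielding a full HomeAppliance object,
# selected per test via `@pytest.mark.parametrize("appliance", [...], indirect=True)`.
# Below is a minimal sketch of how such an indirect fixture could be wired in
# conftest.py. Only the fixture name and the `request.param`/`indirect=True`
# mechanism are taken from the diff; the mock-based body and the placeholder
# `ha_id` value are assumptions for illustration (the real Home Connect conftest
# may build the object from recorded fixture data instead).
from unittest.mock import MagicMock

import pytest
from aiohomeconnect.model import HomeAppliance


@pytest.fixture
def appliance(request: pytest.FixtureRequest) -> HomeAppliance:
    """Return a mock HomeAppliance for the appliance type selected by the test."""
    appliance_type = getattr(request, "param", "Washer")  # e.g. "Washer", "Oven"
    mock = MagicMock(spec=HomeAppliance)
    mock.ha_id = f"{appliance_type}-mock-ha-id"  # hypothetical identifier
    mock.type = appliance_type
    mock.name = appliance_type
    return mock
# -------------------------------------------------------------------------------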
expected: str, hass: HomeAssistant, config_entry: MockConfigEntry, @@ -615,7 +619,7 @@ async def test_sensors_states( setup_credentials: None, client: MagicMock, ) -> None: - """Tests for Appliance_ha_id alarm sensors.""" + """Tests for appliance alarm sensors.""" assert config_entry.state == ConfigEntryState.NOT_LOADED assert await integration_setup(client) assert config_entry.state == ConfigEntryState.LOADED @@ -623,7 +627,7 @@ async def test_sensors_states( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, event_type, ArrayOfEvents( [ @@ -646,7 +650,7 @@ async def test_sensors_states( @pytest.mark.parametrize( ( - "appliance_ha_id", + "appliance", "entity_id", "status_key", "unit_get_status", @@ -671,10 +675,10 @@ async def test_sensors_states( 1, ), ], - indirect=["appliance_ha_id"], + indirect=["appliance"], ) async def test_sensor_unit_fetching( - appliance_ha_id: str, + appliance: HomeAppliance, entity_id: str, status_key: StatusKey, unit_get_status: str | None, @@ -689,7 +693,7 @@ async def test_sensor_unit_fetching( """Test that the sensor entities are capable of fetching units.""" async def get_status_mock(ha_id: str) -> ArrayOfStatus: - if ha_id != appliance_ha_id: + if ha_id != appliance.ha_id: return ArrayOfStatus([]) return ArrayOfStatus( [ @@ -724,3 +728,122 @@ async def test_sensor_unit_fetching( ) assert client.get_status_value.call_count == get_status_value_call_count + + +@pytest.mark.parametrize( + ( + "appliance", + "entity_id", + "status_key", + ), + [ + ( + "Oven", + "sensor.oven_current_oven_cavity_temperature", + StatusKey.COOKING_OVEN_CURRENT_CAVITY_TEMPERATURE, + ), + ], + indirect=["appliance"], +) +async def test_sensor_unit_fetching_error( + appliance: HomeAppliance, + entity_id: str, + status_key: StatusKey, + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[MagicMock], Awaitable[bool]], + setup_credentials: None, + client: MagicMock, +) -> None: + """Test that the sensor entities are capable of fetching units.""" + + async def get_status_mock(ha_id: str) -> ArrayOfStatus: + if ha_id != appliance.ha_id: + return ArrayOfStatus([]) + return ArrayOfStatus( + [ + Status( + key=status_key, + raw_key=status_key.value, + value=0, + ) + ] + ) + + client.get_status = AsyncMock(side_effect=get_status_mock) + client.get_status_value = AsyncMock(side_effect=HomeConnectError()) + + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup(client) + assert config_entry.state == ConfigEntryState.LOADED + + assert hass.states.get(entity_id) + + +@pytest.mark.parametrize( + ( + "appliance", + "entity_id", + "status_key", + "unit", + ), + [ + ( + "Oven", + "sensor.oven_current_oven_cavity_temperature", + StatusKey.COOKING_OVEN_CURRENT_CAVITY_TEMPERATURE, + "°C", + ), + ], + indirect=["appliance"], +) +async def test_sensor_unit_fetching_after_rate_limit_error( + appliance: HomeAppliance, + entity_id: str, + status_key: StatusKey, + unit: str, + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[MagicMock], Awaitable[bool]], + setup_credentials: None, + client: MagicMock, +) -> None: + """Test that the sensor entities are capable of fetching units.""" + + async def get_status_mock(ha_id: str) -> ArrayOfStatus: + if ha_id != appliance.ha_id: + return ArrayOfStatus([]) + return ArrayOfStatus( + [ + Status( + key=status_key, + raw_key=status_key.value, + value=0, + ) + ] + ) + + client.get_status = AsyncMock(side_effect=get_status_mock) + 
client.get_status_value = AsyncMock( + side_effect=[ + TooManyRequestsError("error.key", retry_after=0), + Status( + key=status_key, + raw_key=status_key.value, + value=0, + unit=unit, + ), + ] + ) + + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup(client) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert config_entry.state == ConfigEntryState.LOADED + + assert client.get_status_value.call_count == 2 + + entity_state = hass.states.get(entity_id) + assert entity_state + assert entity_state.attributes["unit_of_measurement"] == unit diff --git a/tests/components/home_connect/test_services.py b/tests/components/home_connect/test_services.py new file mode 100644 index 00000000000..2915cbe4f69 --- /dev/null +++ b/tests/components/home_connect/test_services.py @@ -0,0 +1,474 @@ +"""Tests for the Home Connect actions.""" + +from collections.abc import Awaitable, Callable +from http import HTTPStatus +from typing import Any +from unittest.mock import MagicMock + +from aiohomeconnect.model import HomeAppliance, OptionKey, ProgramKey, SettingKey +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.home_connect.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import device_registry as dr +import homeassistant.helpers.issue_registry as ir + +from tests.common import MockConfigEntry +from tests.typing import ClientSessionGenerator + +DEPRECATED_SERVICE_KV_CALL_PARAMS = [ + { + "domain": DOMAIN, + "service": "set_option_active", + "service_data": { + "device_id": "DEVICE_ID", + "key": OptionKey.BSH_COMMON_FINISH_IN_RELATIVE.value, + "value": 43200, + "unit": "seconds", + }, + "blocking": True, + }, + { + "domain": DOMAIN, + "service": "set_option_selected", + "service_data": { + "device_id": "DEVICE_ID", + "key": OptionKey.LAUNDRY_CARE_WASHER_TEMPERATURE.value, + "value": "LaundryCare.Washer.EnumType.Temperature.GC40", + }, + "blocking": True, + }, +] + +SERVICE_KV_CALL_PARAMS = [ + *DEPRECATED_SERVICE_KV_CALL_PARAMS, + { + "domain": DOMAIN, + "service": "change_setting", + "service_data": { + "device_id": "DEVICE_ID", + "key": SettingKey.BSH_COMMON_CHILD_LOCK.value, + "value": True, + }, + "blocking": True, + }, +] + +SERVICE_COMMAND_CALL_PARAMS = [ + { + "domain": DOMAIN, + "service": "pause_program", + "service_data": { + "device_id": "DEVICE_ID", + }, + "blocking": True, + }, + { + "domain": DOMAIN, + "service": "resume_program", + "service_data": { + "device_id": "DEVICE_ID", + }, + "blocking": True, + }, +] + + +SERVICE_PROGRAM_CALL_PARAMS = [ + { + "domain": DOMAIN, + "service": "select_program", + "service_data": { + "device_id": "DEVICE_ID", + "program": ProgramKey.LAUNDRY_CARE_WASHER_COTTON.value, + "key": OptionKey.LAUNDRY_CARE_WASHER_TEMPERATURE.value, + "value": "LaundryCare.Washer.EnumType.Temperature.GC40", + }, + "blocking": True, + }, + { + "domain": DOMAIN, + "service": "start_program", + "service_data": { + "device_id": "DEVICE_ID", + "program": ProgramKey.LAUNDRY_CARE_WASHER_COTTON.value, + "key": OptionKey.BSH_COMMON_FINISH_IN_RELATIVE.value, + "value": 43200, + "unit": "seconds", + }, + "blocking": True, + }, +] + +SERVICE_APPLIANCE_METHOD_MAPPING = { + "set_option_active": "set_active_program_option", + "set_option_selected": "set_selected_program_option", + "change_setting": 
"set_setting", + "pause_program": "put_command", + "resume_program": "put_command", + "select_program": "set_selected_program", + "start_program": "start_program", +} + +SERVICE_VALIDATION_ERROR_MAPPING = { + "set_option_active": r"Error.*setting.*options.*active.*program.*", + "set_option_selected": r"Error.*setting.*options.*selected.*program.*", + "change_setting": r"Error.*assigning.*value.*setting.*", + "pause_program": r"Error.*executing.*command.*", + "resume_program": r"Error.*executing.*command.*", + "select_program": r"Error.*selecting.*program.*", + "start_program": r"Error.*starting.*program.*", +} + + +SERVICES_SET_PROGRAM_AND_OPTIONS = [ + { + "domain": DOMAIN, + "service": "set_program_and_options", + "service_data": { + "device_id": "DEVICE_ID", + "affects_to": "selected_program", + "program": "dishcare_dishwasher_program_eco_50", + "b_s_h_common_option_start_in_relative": 1800, + }, + "blocking": True, + }, + { + "domain": DOMAIN, + "service": "set_program_and_options", + "service_data": { + "device_id": "DEVICE_ID", + "affects_to": "active_program", + "program": "consumer_products_coffee_maker_program_beverage_coffee", + "consumer_products_coffee_maker_option_bean_amount": "consumer_products_coffee_maker_enum_type_bean_amount_normal", + }, + "blocking": True, + }, + { + "domain": DOMAIN, + "service": "set_program_and_options", + "service_data": { + "device_id": "DEVICE_ID", + "affects_to": "active_program", + "consumer_products_coffee_maker_option_coffee_milk_ratio": "consumer_products_coffee_maker_enum_type_coffee_milk_ratio_50_percent", + }, + "blocking": True, + }, + { + "domain": DOMAIN, + "service": "set_program_and_options", + "service_data": { + "device_id": "DEVICE_ID", + "affects_to": "selected_program", + "consumer_products_coffee_maker_option_fill_quantity": 35, + }, + "blocking": True, + }, +] + + +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) +@pytest.mark.parametrize( + "service_call", + SERVICE_KV_CALL_PARAMS + SERVICE_COMMAND_CALL_PARAMS + SERVICE_PROGRAM_CALL_PARAMS, +) +async def test_key_value_services( + service_call: dict[str, Any], + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + config_entry: MockConfigEntry, + integration_setup: Callable[[MagicMock], Awaitable[bool]], + setup_credentials: None, + client: MagicMock, + appliance: HomeAppliance, +) -> None: + """Create and test services.""" + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup(client) + assert config_entry.state == ConfigEntryState.LOADED + + device_entry = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={(DOMAIN, appliance.ha_id)}, + ) + + service_name = service_call["service"] + service_call["service_data"]["device_id"] = device_entry.id + await hass.services.async_call(**service_call) + await hass.async_block_till_done() + assert ( + getattr(client, SERVICE_APPLIANCE_METHOD_MAPPING[service_name]).call_count == 1 + ) + + +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) +@pytest.mark.parametrize( + ("service_call", "issue_id"), + [ + *zip( + DEPRECATED_SERVICE_KV_CALL_PARAMS + SERVICE_PROGRAM_CALL_PARAMS, + ["deprecated_set_program_and_option_actions"] + * ( + len(DEPRECATED_SERVICE_KV_CALL_PARAMS) + + len(SERVICE_PROGRAM_CALL_PARAMS) + ), + strict=True, + ), + *zip( + SERVICE_COMMAND_CALL_PARAMS, + ["deprecated_command_actions"] * len(SERVICE_COMMAND_CALL_PARAMS), + strict=True, + ), + ], +) +async def test_programs_and_options_actions_deprecation( + 
service_call: dict[str, Any], + issue_id: str, + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + config_entry: MockConfigEntry, + integration_setup: Callable[[MagicMock], Awaitable[bool]], + setup_credentials: None, + client: MagicMock, + appliance: HomeAppliance, + issue_registry: ir.IssueRegistry, + hass_client: ClientSessionGenerator, +) -> None: + """Test deprecated service keys.""" + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup(client) + assert config_entry.state == ConfigEntryState.LOADED + + device_entry = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={(DOMAIN, appliance.ha_id)}, + ) + + service_call["service_data"]["device_id"] = device_entry.id + await hass.services.async_call(**service_call) + await hass.async_block_till_done() + + assert len(issue_registry.issues) == 1 + issue = issue_registry.async_get_issue(DOMAIN, issue_id) + assert issue + + _client = await hass_client() + resp = await _client.post( + "/api/repairs/issues/fix", + json={"handler": DOMAIN, "issue_id": issue.issue_id}, + ) + assert resp.status == HTTPStatus.OK + flow_id = (await resp.json())["flow_id"] + resp = await _client.post(f"/api/repairs/issues/fix/{flow_id}") + + assert not issue_registry.async_get_issue(DOMAIN, issue_id) + assert len(issue_registry.issues) == 0 + + await hass.services.async_call(**service_call) + await hass.async_block_till_done() + + assert len(issue_registry.issues) == 1 + assert issue_registry.async_get_issue(DOMAIN, issue_id) + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + # Assert the issue is no longer present + assert not issue_registry.async_get_issue(DOMAIN, issue_id) + assert len(issue_registry.issues) == 0 + + +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) +@pytest.mark.parametrize( + ("service_call", "called_method"), + zip( + SERVICES_SET_PROGRAM_AND_OPTIONS, + [ + "set_selected_program", + "start_program", + "set_active_program_options", + "set_selected_program_options", + ], + strict=True, + ), +) +async def test_set_program_and_options( + service_call: dict[str, Any], + called_method: str, + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + config_entry: MockConfigEntry, + integration_setup: Callable[[MagicMock], Awaitable[bool]], + setup_credentials: None, + client: MagicMock, + appliance: HomeAppliance, + snapshot: SnapshotAssertion, +) -> None: + """Test recognized options.""" + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup(client) + assert config_entry.state == ConfigEntryState.LOADED + + device_entry = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={(DOMAIN, appliance.ha_id)}, + ) + + service_call["service_data"]["device_id"] = device_entry.id + await hass.services.async_call(**service_call) + await hass.async_block_till_done() + method_mock: MagicMock = getattr(client, called_method) + assert method_mock.call_count == 1 + assert method_mock.call_args == snapshot + + +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) +@pytest.mark.parametrize( + ("service_call", "error_regex"), + zip( + SERVICES_SET_PROGRAM_AND_OPTIONS, + [ + r"Error.*selecting.*program.*", + r"Error.*starting.*program.*", + r"Error.*setting.*options.*active.*program.*", + r"Error.*setting.*options.*selected.*program.*", + ], + strict=True, + ), +) +async def test_set_program_and_options_exceptions( + 
service_call: dict[str, Any], + error_regex: str, + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + config_entry: MockConfigEntry, + integration_setup: Callable[[MagicMock], Awaitable[bool]], + setup_credentials: None, + client_with_exception: MagicMock, + appliance: HomeAppliance, +) -> None: + """Test recognized options.""" + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup(client_with_exception) + assert config_entry.state == ConfigEntryState.LOADED + + device_entry = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={(DOMAIN, appliance.ha_id)}, + ) + + service_call["service_data"]["device_id"] = device_entry.id + with pytest.raises(HomeAssistantError, match=error_regex): + await hass.services.async_call(**service_call) + + +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) +@pytest.mark.parametrize( + "service_call", + SERVICE_KV_CALL_PARAMS + SERVICE_COMMAND_CALL_PARAMS + SERVICE_PROGRAM_CALL_PARAMS, +) +async def test_services_exception_device_id( + service_call: dict[str, Any], + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[MagicMock], Awaitable[bool]], + setup_credentials: None, + client_with_exception: MagicMock, + appliance: HomeAppliance, + device_registry: dr.DeviceRegistry, +) -> None: + """Raise a HomeAssistantError when there is an API error.""" + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup(client_with_exception) + assert config_entry.state == ConfigEntryState.LOADED + + device_entry = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={(DOMAIN, appliance.ha_id)}, + ) + + service_call["service_data"]["device_id"] = device_entry.id + + with pytest.raises(HomeAssistantError): + await hass.services.async_call(**service_call) + + +async def test_services_appliance_not_found( + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[MagicMock], Awaitable[bool]], + setup_credentials: None, + client: MagicMock, + device_registry: dr.DeviceRegistry, +) -> None: + """Raise a ServiceValidationError when device id does not match.""" + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup(client) + assert config_entry.state == ConfigEntryState.LOADED + + service_call = SERVICE_KV_CALL_PARAMS[0] + + service_call["service_data"]["device_id"] = "DOES_NOT_EXISTS" + + with pytest.raises(ServiceValidationError, match=r"Device entry.*not found"): + await hass.services.async_call(**service_call) + + unrelated_config_entry = MockConfigEntry( + domain="TEST", + ) + unrelated_config_entry.add_to_hass(hass) + device_entry = device_registry.async_get_or_create( + config_entry_id=unrelated_config_entry.entry_id, + identifiers={("RANDOM", "ABCD")}, + ) + service_call["service_data"]["device_id"] = device_entry.id + + with pytest.raises(ServiceValidationError, match=r"Config entry.*not found"): + await hass.services.async_call(**service_call) + + device_entry = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={("RANDOM", "ABCD")}, + ) + service_call["service_data"]["device_id"] = device_entry.id + + with pytest.raises(ServiceValidationError, match=r"Appliance.*not found"): + await hass.services.async_call(**service_call) + + +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) +@pytest.mark.parametrize( + "service_call", + SERVICE_KV_CALL_PARAMS + 
SERVICE_COMMAND_CALL_PARAMS + SERVICE_PROGRAM_CALL_PARAMS, +) +async def test_services_exception( + service_call: dict[str, Any], + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[MagicMock], Awaitable[bool]], + setup_credentials: None, + client_with_exception: MagicMock, + appliance: HomeAppliance, + device_registry: dr.DeviceRegistry, +) -> None: + """Raise a ValueError when device id does not match.""" + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup(client_with_exception) + assert config_entry.state == ConfigEntryState.LOADED + + device_entry = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={(DOMAIN, appliance.ha_id)}, + ) + + service_call["service_data"]["device_id"] = device_entry.id + + service_name = service_call["service"] + with pytest.raises( + HomeAssistantError, + match=SERVICE_VALIDATION_ERROR_MAPPING[service_name], + ): + await hass.services.async_call(**service_call) diff --git a/tests/components/home_connect/test_switch.py b/tests/components/home_connect/test_switch.py index 1b38809dc05..2903c8ac718 100644 --- a/tests/components/home_connect/test_switch.py +++ b/tests/components/home_connect/test_switch.py @@ -13,6 +13,7 @@ from aiohomeconnect.model import ( EventMessage, EventType, GetSetting, + HomeAppliance, OptionKey, ProgramDefinition, ProgramKey, @@ -79,8 +80,9 @@ async def test_switches( assert config_entry.state == ConfigEntryState.LOADED +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) async def test_paired_depaired_devices_flow( - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -105,7 +107,7 @@ async def test_paired_depaired_devices_flow( assert await integration_setup(client) assert config_entry.state == ConfigEntryState.LOADED - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) assert entity_entries @@ -113,7 +115,7 @@ async def test_paired_depaired_devices_flow( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DEPAIRED, data=ArrayOfEvents([]), ) @@ -121,7 +123,7 @@ async def test_paired_depaired_devices_flow( ) await hass.async_block_till_done() - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert not device for entity_entry in entity_entries: assert not entity_registry.async_get(entity_entry.entity_id) @@ -130,7 +132,7 @@ async def test_paired_depaired_devices_flow( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.PAIRED, data=ArrayOfEvents([]), ) @@ -138,13 +140,14 @@ async def test_paired_depaired_devices_flow( ) await hass.async_block_till_done() - assert device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + assert device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) for entity_entry in entity_entries: assert entity_registry.async_get(entity_entry.entity_id) +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) async def test_connected_devices( - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: 
MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -162,14 +165,14 @@ async def test_connected_devices( get_available_programs_mock = client.get_available_programs async def get_settings_side_effect(ha_id: str): - if ha_id == appliance_ha_id: + if ha_id == appliance.ha_id: raise HomeConnectApiError( "SDK.Error.HomeAppliance.Connection.Initialization.Failed" ) return await get_settings_original_mock.side_effect(ha_id) async def get_available_programs_side_effect(ha_id: str): - if ha_id == appliance_ha_id: + if ha_id == appliance.ha_id: raise HomeConnectApiError( "SDK.Error.HomeAppliance.Connection.Initialization.Failed" ) @@ -185,14 +188,14 @@ async def test_connected_devices( client.get_settings = get_settings_original_mock client.get_available_programs = get_available_programs_mock - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, data=ArrayOfEvents([]), ) @@ -200,20 +203,20 @@ async def test_connected_devices( ) await hass.async_block_till_done() - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device new_entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) assert len(new_entity_entries) > len(entity_entries) -@pytest.mark.parametrize("appliance_ha_id", ["Dishwasher"], indirect=True) -async def test_switch_entity_availabilty( +@pytest.mark.parametrize("appliance", ["Dishwasher"], indirect=True) +async def test_switch_entity_availability( hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, client: MagicMock, - appliance_ha_id: str, + appliance: HomeAppliance, ) -> None: """Test if switch entities availability are based on the appliance connection state.""" entity_ids = [ @@ -233,7 +236,7 @@ async def test_switch_entity_availabilty( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DISCONNECTED, ArrayOfEvents([]), ) @@ -247,7 +250,7 @@ async def test_switch_entity_availabilty( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, ArrayOfEvents([]), ) @@ -268,7 +271,7 @@ async def test_switch_entity_availabilty( "settings_key_arg", "setting_value_arg", "state", - "appliance_ha_id", + "appliance", ), [ ( @@ -288,7 +291,7 @@ async def test_switch_entity_availabilty( "Dishwasher", ), ], - indirect=["appliance_ha_id"], + indirect=["appliance"], ) async def test_switch_functionality( entity_id: str, @@ -300,7 +303,7 @@ async def test_switch_functionality( config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, - appliance_ha_id: str, + appliance: HomeAppliance, client: MagicMock, ) -> None: """Test switch functionality.""" @@ -312,13 +315,13 @@ async def test_switch_functionality( await hass.services.async_call(SWITCH_DOMAIN, service, {ATTR_ENTITY_ID: entity_id}) await hass.async_block_till_done() client.set_setting.assert_awaited_once_with( - appliance_ha_id, setting_key=settings_key_arg, value=setting_value_arg + appliance.ha_id, setting_key=settings_key_arg, value=setting_value_arg ) 
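# --- Illustrative aside (sketch only, not the integration's real code) ---------
# The assertion just above pins down the contract these switch tests rely on:
# turning the entity on or off must translate into exactly one
# client.set_setting(ha_id, setting_key=..., value=...) call. The call shape is
# taken from the assert in the diff; the standalone helper below and its name
# are inventions for illustration only.
from typing import Any

from aiohomeconnect.model import SettingKey


async def _sketch_switch_turn_on(
    client: Any, ha_id: str, setting_key: SettingKey
) -> None:
    """Mirror the call shape asserted by test_switch_functionality."""
    await client.set_setting(ha_id, setting_key=setting_key, value=True)
# -------------------------------------------------------------------------------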
assert hass.states.is_state(entity_id, state) @pytest.mark.parametrize( - ("entity_id", "program_key", "initial_state", "appliance_ha_id"), + ("entity_id", "program_key", "initial_state", "appliance"), [ ( "switch.dryer_program_mix", @@ -333,7 +336,7 @@ async def test_switch_functionality( "Dryer", ), ], - indirect=["appliance_ha_id"], + indirect=["appliance"], ) async def test_program_switch_functionality( entity_id: str, @@ -343,7 +346,7 @@ async def test_program_switch_functionality( config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, - appliance_ha_id: str, + appliance: HomeAppliance, client: MagicMock, ) -> None: """Test switch functionality.""" @@ -383,7 +386,7 @@ async def test_program_switch_functionality( await hass.async_block_till_done() assert hass.states.is_state(entity_id, STATE_ON) client.start_program.assert_awaited_once_with( - appliance_ha_id, program_key=program_key + appliance.ha_id, program_key=program_key ) await hass.services.async_call( @@ -391,7 +394,7 @@ async def test_program_switch_functionality( ) await hass.async_block_till_done() assert hass.states.is_state(entity_id, STATE_OFF) - client.stop_program.assert_awaited_once_with(appliance_ha_id) + client.stop_program.assert_awaited_once_with(appliance.ha_id) @pytest.mark.parametrize( @@ -496,7 +499,7 @@ async def test_switch_exception_handling( @pytest.mark.parametrize( - ("entity_id", "status", "service", "state", "appliance_ha_id"), + ("entity_id", "status", "service", "state", "appliance"), [ ( "switch.fridgefreezer_freezer_super_mode", @@ -513,7 +516,7 @@ async def test_switch_exception_handling( "FridgeFreezer", ), ], - indirect=["appliance_ha_id"], + indirect=["appliance"], ) async def test_ent_desc_switch_functionality( entity_id: str, @@ -524,7 +527,7 @@ async def test_ent_desc_switch_functionality( config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, - appliance_ha_id: str, + appliance: HomeAppliance, client: MagicMock, ) -> None: """Test switch functionality - entity description setup.""" @@ -544,7 +547,7 @@ async def test_ent_desc_switch_functionality( "status", "service", "mock_attr", - "appliance_ha_id", + "appliance", "exception_match", ), [ @@ -565,7 +568,7 @@ async def test_ent_desc_switch_functionality( r"Error.*turn.*off.*", ), ], - indirect=["appliance_ha_id"], + indirect=["appliance"], ) async def test_ent_desc_switch_exception_handling( entity_id: str, @@ -577,7 +580,7 @@ async def test_ent_desc_switch_exception_handling( integration_setup: Callable[[MagicMock], Awaitable[bool]], config_entry: MockConfigEntry, setup_credentials: None, - appliance_ha_id: str, + appliance: HomeAppliance, client_with_exception: MagicMock, ) -> None: """Test switch exception handling - entity description setup.""" @@ -613,7 +616,7 @@ async def test_ent_desc_switch_exception_handling( "service", "setting_value_arg", "power_state", - "appliance_ha_id", + "appliance", ), [ ( @@ -649,9 +652,9 @@ async def test_ent_desc_switch_exception_handling( "Dishwasher", ), ], - indirect=["appliance_ha_id"], + indirect=["appliance"], ) -async def test_power_swtich( +async def test_power_switch( entity_id: str, allowed_values: list[str | None] | None, service: str, @@ -661,7 +664,7 @@ async def test_power_swtich( config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, - appliance_ha_id: str, + appliance: HomeAppliance, client: MagicMock, ) -> None: 
"""Test power switch functionality.""" @@ -686,7 +689,7 @@ async def test_power_swtich( await hass.services.async_call(SWITCH_DOMAIN, service, {ATTR_ENTITY_ID: entity_id}) await hass.async_block_till_done() client.set_setting.assert_awaited_once_with( - appliance_ha_id, + appliance.ha_id, setting_key=SettingKey.BSH_COMMON_POWER_STATE, value=setting_value_arg, ) @@ -800,7 +803,7 @@ async def test_power_switch_service_validation_errors( @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_create_issue( hass: HomeAssistant, - appliance_ha_id: str, + appliance: HomeAppliance, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, @@ -882,7 +885,7 @@ async def test_create_issue( ], ) @pytest.mark.parametrize( - ("entity_id", "option_key", "appliance_ha_id"), + ("entity_id", "option_key", "appliance"), [ ( "switch.dishwasher_half_load", @@ -890,12 +893,12 @@ async def test_create_issue( "Dishwasher", ) ], - indirect=["appliance_ha_id"], + indirect=["appliance"], ) async def test_options_functionality( entity_id: str, option_key: OptionKey, - appliance_ha_id: str, + appliance: HomeAppliance, set_active_program_options_side_effect: ActiveProgramNotSetError | None, set_selected_program_options_side_effect: SelectedProgramNotSetError | None, called_mock_method: str, @@ -933,7 +936,7 @@ async def test_options_functionality( await hass.async_block_till_done() assert called_mock.called - assert called_mock.call_args.args == (appliance_ha_id,) + assert called_mock.call_args.args == (appliance.ha_id,) assert called_mock.call_args.kwargs == { "option_key": option_key, "value": False, @@ -946,7 +949,7 @@ async def test_options_functionality( await hass.async_block_till_done() assert called_mock.called - assert called_mock.call_args.args == (appliance_ha_id,) + assert called_mock.call_args.args == (appliance.ha_id,) assert called_mock.call_args.kwargs == { "option_key": option_key, "value": True, diff --git a/tests/components/home_connect/test_time.py b/tests/components/home_connect/test_time.py index affb5ecfedf..e52e62a8927 100644 --- a/tests/components/home_connect/test_time.py +++ b/tests/components/home_connect/test_time.py @@ -2,6 +2,7 @@ from collections.abc import Awaitable, Callable from datetime import time +from http import HTTPStatus from unittest.mock import AsyncMock, MagicMock from aiohomeconnect.model import ( @@ -10,20 +11,32 @@ from aiohomeconnect.model import ( EventMessage, EventType, GetSetting, + HomeAppliance, SettingKey, ) from aiohomeconnect.model.error import HomeConnectApiError, HomeConnectError import pytest +from homeassistant.components.automation import ( + DOMAIN as AUTOMATION_DOMAIN, + automations_with_entity, +) from homeassistant.components.home_connect.const import DOMAIN +from homeassistant.components.script import DOMAIN as SCRIPT_DOMAIN, scripts_with_entity from homeassistant.components.time import DOMAIN as TIME_DOMAIN, SERVICE_SET_VALUE from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ATTR_ENTITY_ID, ATTR_TIME, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import ( + device_registry as dr, + entity_registry as er, + issue_registry as ir, +) +from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry +from tests.typing 
import ClientSessionGenerator @pytest.fixture @@ -44,9 +57,10 @@ async def test_time( assert config_entry.state is ConfigEntryState.LOADED -@pytest.mark.parametrize("appliance_ha_id", ["Oven"], indirect=True) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize("appliance", ["Oven"], indirect=True) async def test_paired_depaired_devices_flow( - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -60,7 +74,7 @@ async def test_paired_depaired_devices_flow( assert await integration_setup(client) assert config_entry.state == ConfigEntryState.LOADED - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) assert entity_entries @@ -68,7 +82,7 @@ async def test_paired_depaired_devices_flow( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DEPAIRED, data=ArrayOfEvents([]), ) @@ -76,7 +90,7 @@ async def test_paired_depaired_devices_flow( ) await hass.async_block_till_done() - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert not device for entity_entry in entity_entries: assert not entity_registry.async_get(entity_entry.entity_id) @@ -85,7 +99,7 @@ async def test_paired_depaired_devices_flow( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.PAIRED, data=ArrayOfEvents([]), ) @@ -93,14 +107,15 @@ async def test_paired_depaired_devices_flow( ) await hass.async_block_till_done() - assert device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + assert device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) for entity_entry in entity_entries: assert entity_registry.async_get(entity_entry.entity_id) -@pytest.mark.parametrize("appliance_ha_id", ["Oven"], indirect=True) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize("appliance", ["Oven"], indirect=True) async def test_connected_devices( - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -117,7 +132,7 @@ async def test_connected_devices( get_settings_original_mock = client.get_settings async def get_settings_side_effect(ha_id: str): - if ha_id == appliance_ha_id: + if ha_id == appliance.ha_id: raise HomeConnectApiError( "SDK.Error.HomeAppliance.Connection.Initialization.Failed" ) @@ -129,14 +144,14 @@ async def test_connected_devices( assert config_entry.state == ConfigEntryState.LOADED client.get_settings = get_settings_original_mock - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, data=ArrayOfEvents([]), ) @@ -144,20 +159,21 @@ async def test_connected_devices( ) await hass.async_block_till_done() - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = 
device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device new_entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) assert len(new_entity_entries) > len(entity_entries) -@pytest.mark.parametrize("appliance_ha_id", ["Oven"], indirect=True) -async def test_time_entity_availabilty( +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize("appliance", ["Oven"], indirect=True) +async def test_time_entity_availability( hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, client: MagicMock, - appliance_ha_id: str, + appliance: HomeAppliance, ) -> None: """Test if time entities availability are based on the appliance connection state.""" entity_ids = [ @@ -175,7 +191,7 @@ async def test_time_entity_availabilty( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DISCONNECTED, ArrayOfEvents([]), ) @@ -189,7 +205,7 @@ async def test_time_entity_availabilty( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, ArrayOfEvents([]), ) @@ -203,7 +219,8 @@ async def test_time_entity_availabilty( assert state.state != STATE_UNAVAILABLE -@pytest.mark.parametrize("appliance_ha_id", ["Oven"], indirect=True) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize("appliance", ["Oven"], indirect=True) @pytest.mark.parametrize( ("entity_id", "setting_key"), [ @@ -214,7 +231,7 @@ async def test_time_entity_availabilty( ], ) async def test_time_entity_functionality( - appliance_ha_id: str, + appliance: HomeAppliance, entity_id: str, setting_key: SettingKey, hass: HomeAssistant, @@ -242,11 +259,12 @@ async def test_time_entity_functionality( ) await hass.async_block_till_done() client.set_setting.assert_awaited_once_with( - appliance_ha_id, setting_key=setting_key, value=value + appliance.ha_id, setting_key=setting_key, value=value ) assert hass.states.is_state(entity_id, str(time(second=value))) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") @pytest.mark.parametrize( ("entity_id", "setting_key", "mock_attr"), [ @@ -298,3 +316,170 @@ async def test_time_entity_error( blocking=True, ) assert getattr(client_with_exception, mock_attr).call_count == 2 + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize("appliance", ["Oven"], indirect=True) +async def test_create_issue( + hass: HomeAssistant, + appliance: HomeAppliance, + config_entry: MockConfigEntry, + integration_setup: Callable[[MagicMock], Awaitable[bool]], + setup_credentials: None, + client: MagicMock, + issue_registry: ir.IssueRegistry, +) -> None: + """Test we create an issue when an automation or script is using a deprecated entity.""" + entity_id = f"{TIME_DOMAIN}.oven_alarm_clock" + automation_script_issue_id = ( + f"deprecated_time_alarm_clock_in_automations_scripts_{entity_id}" + ) + action_handler_issue_id = f"deprecated_time_alarm_clock_{entity_id}" + + assert await async_setup_component( + hass, + AUTOMATION_DOMAIN, + { + AUTOMATION_DOMAIN: { + "alias": "test", + "trigger": {"platform": "state", "entity_id": entity_id}, + "action": { + "action": "automation.turn_on", + "target": { + "entity_id": "automation.test", + }, + }, + } + }, + ) + assert await async_setup_component( + hass, + SCRIPT_DOMAIN, + { + SCRIPT_DOMAIN: { + "test": { + "sequence": [ + { + "action": "switch.turn_on", + "entity_id": entity_id, + }, 
+ ], + } + } + }, + ) + + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup(client) + assert config_entry.state == ConfigEntryState.LOADED + + await hass.services.async_call( + TIME_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: entity_id, + ATTR_TIME: time(minute=1), + }, + blocking=True, + ) + + assert automations_with_entity(hass, entity_id)[0] == "automation.test" + assert scripts_with_entity(hass, entity_id)[0] == "script.test" + + assert len(issue_registry.issues) == 2 + assert issue_registry.async_get_issue(DOMAIN, automation_script_issue_id) + assert issue_registry.async_get_issue(DOMAIN, action_handler_issue_id) + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + # Assert the issue is no longer present + assert not issue_registry.async_get_issue(DOMAIN, automation_script_issue_id) + assert not issue_registry.async_get_issue(DOMAIN, action_handler_issue_id) + assert len(issue_registry.issues) == 0 + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize("appliance", ["Oven"], indirect=True) +async def test_issue_fix( + hass: HomeAssistant, + appliance: HomeAppliance, + config_entry: MockConfigEntry, + integration_setup: Callable[[MagicMock], Awaitable[bool]], + setup_credentials: None, + client: MagicMock, + issue_registry: ir.IssueRegistry, + hass_client: ClientSessionGenerator, +) -> None: + """Test we create an issue when an automation or script is using a deprecated entity.""" + entity_id = f"{TIME_DOMAIN}.oven_alarm_clock" + automation_script_issue_id = ( + f"deprecated_time_alarm_clock_in_automations_scripts_{entity_id}" + ) + action_handler_issue_id = f"deprecated_time_alarm_clock_{entity_id}" + + assert await async_setup_component( + hass, + AUTOMATION_DOMAIN, + { + AUTOMATION_DOMAIN: { + "alias": "test", + "trigger": {"platform": "state", "entity_id": entity_id}, + "action": { + "action": "automation.turn_on", + "target": { + "entity_id": "automation.test", + }, + }, + } + }, + ) + assert await async_setup_component( + hass, + SCRIPT_DOMAIN, + { + SCRIPT_DOMAIN: { + "test": { + "sequence": [ + { + "action": "switch.turn_on", + "entity_id": entity_id, + }, + ], + } + } + }, + ) + + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup(client) + assert config_entry.state == ConfigEntryState.LOADED + + await hass.services.async_call( + TIME_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: entity_id, + ATTR_TIME: time(minute=1), + }, + blocking=True, + ) + + assert len(issue_registry.issues) == 2 + assert issue_registry.async_get_issue(DOMAIN, automation_script_issue_id) + assert issue_registry.async_get_issue(DOMAIN, action_handler_issue_id) + + for issue in issue_registry.issues.copy().values(): + _client = await hass_client() + resp = await _client.post( + "/api/repairs/issues/fix", + json={"handler": DOMAIN, "issue_id": issue.issue_id}, + ) + assert resp.status == HTTPStatus.OK + flow_id = (await resp.json())["flow_id"] + resp = await _client.post(f"/api/repairs/issues/fix/{flow_id}") + + # Assert the issue is no longer present + assert not issue_registry.async_get_issue(DOMAIN, automation_script_issue_id) + assert not issue_registry.async_get_issue(DOMAIN, action_handler_issue_id) + assert len(issue_registry.issues) == 0 diff --git a/tests/components/homeassistant_hardware/test_coordinator.py b/tests/components/homeassistant_hardware/test_coordinator.py new file mode 100644 index 
00000000000..9c57aac6811 --- /dev/null +++ b/tests/components/homeassistant_hardware/test_coordinator.py @@ -0,0 +1,55 @@ +"""Test firmware update coordinator for Home Assistant Hardware.""" + +from unittest.mock import AsyncMock, Mock, call, patch + +from ha_silabs_firmware_client import FirmwareManifest, ManifestMissing +import pytest +from yarl import URL + +from homeassistant.components.homeassistant_hardware.coordinator import ( + FirmwareUpdateCoordinator, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.util import dt as dt_util + + +async def test_firmware_update_coordinator_fetching( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test the firmware update coordinator loads manifests.""" + session = async_get_clientsession(hass) + + manifest = FirmwareManifest( + url=URL("https://example.org/firmware"), + html_url=URL("https://example.org/release_notes"), + created_at=dt_util.utcnow(), + firmwares=(), + ) + + mock_client = Mock() + mock_client.async_update_data = AsyncMock(side_effect=[ManifestMissing(), manifest]) + + with patch( + "homeassistant.components.homeassistant_hardware.coordinator.FirmwareUpdateClient", + return_value=mock_client, + ): + coordinator = FirmwareUpdateCoordinator( + hass, session, "https://example.org/firmware" + ) + + listener = Mock() + coordinator.async_add_listener(listener) + + # The first update will fail + await coordinator.async_refresh() + assert listener.mock_calls == [call()] + assert coordinator.data is None + assert "GitHub release assets haven't been uploaded yet" in caplog.text + + # The second will succeed + await coordinator.async_refresh() + assert listener.mock_calls == [call(), call()] + assert coordinator.data == manifest + + await coordinator.async_shutdown() diff --git a/tests/components/homeassistant_hardware/test_update.py b/tests/components/homeassistant_hardware/test_update.py new file mode 100644 index 00000000000..0c351141e12 --- /dev/null +++ b/tests/components/homeassistant_hardware/test_update.py @@ -0,0 +1,637 @@ +"""Test Home Assistant Hardware firmware update entity.""" + +from __future__ import annotations + +import asyncio +from collections.abc import AsyncGenerator +import dataclasses +import logging +from unittest.mock import AsyncMock, Mock, patch + +import aiohttp +from ha_silabs_firmware_client import FirmwareManifest, FirmwareMetadata +import pytest +from yarl import URL + +from homeassistant.components.homeassistant_hardware.coordinator import ( + FirmwareUpdateCoordinator, +) +from homeassistant.components.homeassistant_hardware.helpers import ( + async_notify_firmware_info, + async_register_firmware_info_provider, +) +from homeassistant.components.homeassistant_hardware.update import ( + BaseFirmwareUpdateEntity, + FirmwareUpdateEntityDescription, + FirmwareUpdateExtraStoredData, +) +from homeassistant.components.homeassistant_hardware.util import ( + ApplicationType, + FirmwareInfo, + OwningIntegration, +) +from homeassistant.components.update import UpdateDeviceClass +from homeassistant.config_entries import ConfigEntry, ConfigEntryState, ConfigFlow +from homeassistant.const import EVENT_STATE_CHANGED, EntityCategory +from homeassistant.core import ( + Event, + EventStateChangedData, + HomeAssistant, + HomeAssistantError, + State, + callback, +) +from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from 
homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback +from homeassistant.setup import async_setup_component +from homeassistant.util import dt as dt_util + +from tests.common import ( + MockConfigEntry, + MockModule, + MockPlatform, + async_capture_events, + mock_config_flow, + mock_integration, + mock_platform, + mock_restore_cache_with_extra_data, +) + +TEST_DOMAIN = "test" +TEST_DEVICE = "/dev/serial/by-id/some-unique-serial-device-12345" +TEST_FIRMWARE_RELEASES_URL = "https://example.org/firmware" +TEST_UPDATE_ENTITY_ID = "update.test_firmware" +TEST_MANIFEST = FirmwareManifest( + url=URL("https://example.org/firmware"), + html_url=URL("https://example.org/release_notes"), + created_at=dt_util.utcnow(), + firmwares=( + FirmwareMetadata( + filename="skyconnect_zigbee_ncp_test.gbl", + checksum="aaa", + size=123, + release_notes="Some release notes go here", + metadata={ + "baudrate": 115200, + "ezsp_version": "7.4.4.0", + "fw_type": "zigbee_ncp", + "fw_variant": None, + "metadata_version": 2, + "sdk_version": "4.4.4", + }, + url=URL("https://example.org/firmwares/skyconnect_zigbee_ncp_test.gbl"), + ), + ), +) + + +TEST_FIRMWARE_ENTITY_DESCRIPTIONS: dict[ + ApplicationType | None, FirmwareUpdateEntityDescription +] = { + ApplicationType.EZSP: FirmwareUpdateEntityDescription( + key="firmware", + display_precision=0, + device_class=UpdateDeviceClass.FIRMWARE, + entity_category=EntityCategory.CONFIG, + version_parser=lambda fw: fw.split(" ", 1)[0], + fw_type="skyconnect_zigbee_ncp", + version_key="ezsp_version", + expected_firmware_type=ApplicationType.EZSP, + firmware_name="EmberZNet", + ), + ApplicationType.SPINEL: FirmwareUpdateEntityDescription( + key="firmware", + display_precision=0, + device_class=UpdateDeviceClass.FIRMWARE, + entity_category=EntityCategory.CONFIG, + version_parser=lambda fw: fw.split("/", 1)[1].split("_", 1)[0], + fw_type="skyconnect_openthread_rcp", + version_key="ot_rcp_version", + expected_firmware_type=ApplicationType.SPINEL, + firmware_name="OpenThread RCP", + ), + None: FirmwareUpdateEntityDescription( + key="firmware", + display_precision=0, + device_class=UpdateDeviceClass.FIRMWARE, + entity_category=EntityCategory.CONFIG, + version_parser=lambda fw: fw, + fw_type=None, + version_key=None, + expected_firmware_type=None, + firmware_name=None, + ), +} + + +def _mock_async_create_update_entity( + hass: HomeAssistant, + config_entry: ConfigEntry, + session: aiohttp.ClientSession, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> MockFirmwareUpdateEntity: + """Create an update entity that handles firmware type changes.""" + firmware_type = config_entry.data["firmware"] + entity_description = TEST_FIRMWARE_ENTITY_DESCRIPTIONS[ + ApplicationType(firmware_type) if firmware_type is not None else None + ] + + entity = MockFirmwareUpdateEntity( + device=config_entry.data["device"], + config_entry=config_entry, + update_coordinator=FirmwareUpdateCoordinator( + hass, + session, + TEST_FIRMWARE_RELEASES_URL, + ), + entity_description=entity_description, + ) + + def firmware_type_changed( + old_type: ApplicationType | None, new_type: ApplicationType | None + ) -> None: + """Replace the current entity when the firmware type changes.""" + er.async_get(hass).async_remove(entity.entity_id) + async_add_entities( + [ + _mock_async_create_update_entity( + hass, config_entry, session, async_add_entities + ) + ] + ) + + entity.async_on_remove( + entity.add_firmware_type_changed_callback(firmware_type_changed) + ) + + return entity + + +async def 
mock_async_setup_entry( + hass: HomeAssistant, config_entry: ConfigEntry +) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups(config_entry, ["update"]) + return True + + +async def mock_async_setup_update_entities( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up the firmware update config entry.""" + session = async_get_clientsession(hass) + entity = _mock_async_create_update_entity( + hass, config_entry, session, async_add_entities + ) + + async_add_entities([entity]) + + +class MockFirmwareUpdateEntity(BaseFirmwareUpdateEntity): + """Mock SkyConnect firmware update entity.""" + + bootloader_reset_type = None + + def __init__( + self, + device: str, + config_entry: ConfigEntry, + update_coordinator: FirmwareUpdateCoordinator, + entity_description: FirmwareUpdateEntityDescription, + ) -> None: + """Initialize the mock SkyConnect firmware update entity.""" + super().__init__(device, config_entry, update_coordinator, entity_description) + self._attr_unique_id = self.entity_description.key + + # Use the cached firmware info if it exists + if self._config_entry.data["firmware"] is not None: + self._current_firmware_info = FirmwareInfo( + device=device, + firmware_type=ApplicationType(self._config_entry.data["firmware"]), + firmware_version=self._config_entry.data["firmware_version"], + owners=[], + source=TEST_DOMAIN, + ) + + @callback + def _firmware_info_callback(self, firmware_info: FirmwareInfo) -> None: + """Handle updated firmware info being pushed by an integration.""" + super()._firmware_info_callback(firmware_info) + + self.hass.config_entries.async_update_entry( + self._config_entry, + data={ + **self._config_entry.data, + "firmware": firmware_info.firmware_type, + "firmware_version": firmware_info.firmware_version, + }, + ) + + +@pytest.fixture(name="update_config_entry") +async def mock_update_config_entry( + hass: HomeAssistant, +) -> AsyncGenerator[ConfigEntry]: + """Set up a mock Home Assistant Hardware firmware update entity.""" + await async_setup_component(hass, "homeassistant", {}) + await async_setup_component(hass, "homeassistant_hardware", {}) + + mock_integration( + hass, + MockModule( + TEST_DOMAIN, + async_setup_entry=mock_async_setup_entry, + ), + built_in=False, + ) + mock_platform(hass, "test.config_flow") + mock_platform( + hass, + "test.update", + MockPlatform(async_setup_entry=mock_async_setup_update_entities), + ) + + # Set up a mock integration using the hardware update entity + config_entry = MockConfigEntry( + domain=TEST_DOMAIN, + data={ + "device": TEST_DEVICE, + "firmware": "ezsp", + "firmware_version": "7.3.1.0 build 0", + }, + ) + config_entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.homeassistant_hardware.coordinator.FirmwareUpdateClient", + autospec=True, + ) as mock_update_client, + mock_config_flow(TEST_DOMAIN, ConfigFlow), + ): + mock_update_client.return_value.async_update_data.return_value = TEST_MANIFEST + yield config_entry + + +async def test_update_entity_installation( + hass: HomeAssistant, update_config_entry: ConfigEntry +) -> None: + """Test the Hardware firmware update entity installation.""" + + assert await hass.config_entries.async_setup(update_config_entry.entry_id) + await hass.async_block_till_done() + + # Set up another integration communicating with the device + owning_config_entry = MockConfigEntry( + domain="another_integration", + data={ + "device": { + "path": TEST_DEVICE, + 
"flow_control": "hardware", + "baudrate": 115200, + }, + "radio_type": "ezsp", + }, + version=4, + ) + owning_config_entry.add_to_hass(hass) + owning_config_entry.mock_state(hass, ConfigEntryState.LOADED) + + # The integration provides firmware info + mock_hw_module = Mock() + mock_hw_module.get_firmware_info = lambda hass, config_entry: FirmwareInfo( + device=TEST_DEVICE, + firmware_type=ApplicationType.EZSP, + firmware_version="7.3.1.0 build 0", + owners=[OwningIntegration(config_entry_id=config_entry.entry_id)], + source="another_integration", + ) + + async_register_firmware_info_provider(hass, "another_integration", mock_hw_module) + + # Pretend the other integration loaded and notified hardware of the running firmware + await async_notify_firmware_info( + hass, + "another_integration", + mock_hw_module.get_firmware_info(hass, owning_config_entry), + ) + + state_before_update = hass.states.get(TEST_UPDATE_ENTITY_ID) + assert state_before_update is not None + assert state_before_update.state == "unknown" + assert state_before_update.attributes["title"] == "EmberZNet" + assert state_before_update.attributes["installed_version"] == "7.3.1.0" + assert state_before_update.attributes["latest_version"] is None + + # When we check for an update, one will be shown + await hass.services.async_call( + "homeassistant", + "update_entity", + {"entity_id": TEST_UPDATE_ENTITY_ID}, + blocking=True, + ) + state_after_update = hass.states.get(TEST_UPDATE_ENTITY_ID) + assert state_after_update is not None + assert state_after_update.state == "on" + assert state_after_update.attributes["title"] == "EmberZNet" + assert state_after_update.attributes["installed_version"] == "7.3.1.0" + assert state_after_update.attributes["latest_version"] == "7.4.4.0" + assert state_after_update.attributes["release_summary"] == ( + "Some release notes go here" + ) + assert state_after_update.attributes["release_url"] == ( + "https://example.org/release_notes" + ) + + mock_firmware = Mock() + mock_flasher = AsyncMock() + + async def mock_flash_firmware(fw_image, progress_callback): + await asyncio.sleep(0) + progress_callback(0, 100) + await asyncio.sleep(0) + progress_callback(50, 100) + await asyncio.sleep(0) + progress_callback(100, 100) + + mock_flasher.flash_firmware = mock_flash_firmware + + # When we install it, the other integration is reloaded + with ( + patch( + "homeassistant.components.homeassistant_hardware.update.parse_firmware_image", + return_value=mock_firmware, + ), + patch( + "homeassistant.components.homeassistant_hardware.update.Flasher", + return_value=mock_flasher, + ), + patch( + "homeassistant.components.homeassistant_hardware.update.probe_silabs_firmware_info", + return_value=FirmwareInfo( + device=TEST_DEVICE, + firmware_type=ApplicationType.EZSP, + firmware_version="7.4.4.0 build 0", + owners=[], + source="probe", + ), + ), + patch.object( + owning_config_entry, "async_unload", wraps=owning_config_entry.async_unload + ) as owning_config_entry_unload, + ): + state_changes: list[Event[EventStateChangedData]] = async_capture_events( + hass, EVENT_STATE_CHANGED + ) + await hass.services.async_call( + "update", + "install", + {"entity_id": TEST_UPDATE_ENTITY_ID}, + blocking=True, + ) + + # Progress events are emitted during the installation + assert len(state_changes) == 7 + + # Indeterminate progress first + assert state_changes[0].data["new_state"].attributes["in_progress"] is True + assert state_changes[0].data["new_state"].attributes["update_percentage"] is None + + # Then the update starts + assert 
state_changes[1].data["new_state"].attributes["update_percentage"] == 0 + assert state_changes[2].data["new_state"].attributes["update_percentage"] == 50 + assert state_changes[3].data["new_state"].attributes["update_percentage"] == 100 + + # Once it is done, we probe the firmware + assert state_changes[4].data["new_state"].attributes["in_progress"] is True + assert state_changes[4].data["new_state"].attributes["update_percentage"] is None + + # Finally, the update finishes + assert state_changes[5].data["new_state"].attributes["update_percentage"] is None + assert state_changes[6].data["new_state"].attributes["update_percentage"] is None + assert state_changes[6].data["new_state"].attributes["in_progress"] is False + + # The owning integration was unloaded and is again running + assert len(owning_config_entry_unload.mock_calls) == 1 + + # After the firmware update, the entity has the new version and the correct state + state_after_install = hass.states.get(TEST_UPDATE_ENTITY_ID) + assert state_after_install is not None + assert state_after_install.state == "off" + assert state_after_install.attributes["title"] == "EmberZNet" + assert state_after_install.attributes["installed_version"] == "7.4.4.0" + assert state_after_install.attributes["latest_version"] == "7.4.4.0" + + +async def test_update_entity_installation_failure( + hass: HomeAssistant, update_config_entry: ConfigEntry +) -> None: + """Test installation failing during flashing.""" + assert await hass.config_entries.async_setup(update_config_entry.entry_id) + await hass.async_block_till_done() + + await hass.services.async_call( + "homeassistant", + "update_entity", + {"entity_id": TEST_UPDATE_ENTITY_ID}, + blocking=True, + ) + + state_before_install = hass.states.get(TEST_UPDATE_ENTITY_ID) + assert state_before_install is not None + assert state_before_install.state == "on" + assert state_before_install.attributes["title"] == "EmberZNet" + assert state_before_install.attributes["installed_version"] == "7.3.1.0" + assert state_before_install.attributes["latest_version"] == "7.4.4.0" + + mock_flasher = AsyncMock() + mock_flasher.flash_firmware.side_effect = RuntimeError( + "Something broke during flashing!" 
+ ) + + with ( + patch( + "homeassistant.components.homeassistant_hardware.update.parse_firmware_image", + return_value=Mock(), + ), + patch( + "homeassistant.components.homeassistant_hardware.update.Flasher", + return_value=mock_flasher, + ), + pytest.raises(HomeAssistantError, match="Failed to flash firmware"), + ): + await hass.services.async_call( + "update", + "install", + {"entity_id": TEST_UPDATE_ENTITY_ID}, + blocking=True, + ) + + # After the firmware update fails, we can still try again + state_after_install = hass.states.get(TEST_UPDATE_ENTITY_ID) + assert state_after_install is not None + assert state_after_install.state == "on" + assert state_after_install.attributes["title"] == "EmberZNet" + assert state_after_install.attributes["installed_version"] == "7.3.1.0" + assert state_after_install.attributes["latest_version"] == "7.4.4.0" + + +async def test_update_entity_installation_probe_failure( + hass: HomeAssistant, update_config_entry: ConfigEntry +) -> None: + """Test installation failing during post-flashing probing.""" + assert await hass.config_entries.async_setup(update_config_entry.entry_id) + await hass.async_block_till_done() + + await hass.services.async_call( + "homeassistant", + "update_entity", + {"entity_id": TEST_UPDATE_ENTITY_ID}, + blocking=True, + ) + + state_before_install = hass.states.get(TEST_UPDATE_ENTITY_ID) + assert state_before_install is not None + assert state_before_install.state == "on" + assert state_before_install.attributes["title"] == "EmberZNet" + assert state_before_install.attributes["installed_version"] == "7.3.1.0" + assert state_before_install.attributes["latest_version"] == "7.4.4.0" + + with ( + patch( + "homeassistant.components.homeassistant_hardware.update.parse_firmware_image", + return_value=Mock(), + ), + patch( + "homeassistant.components.homeassistant_hardware.update.Flasher", + return_value=AsyncMock(), + ), + patch( + "homeassistant.components.homeassistant_hardware.update.probe_silabs_firmware_info", + return_value=None, + ), + pytest.raises( + HomeAssistantError, match="Failed to probe the firmware after flashing" + ), + ): + await hass.services.async_call( + "update", + "install", + {"entity_id": TEST_UPDATE_ENTITY_ID}, + blocking=True, + ) + + # After the firmware update fails, we can still try again + state_after_install = hass.states.get(TEST_UPDATE_ENTITY_ID) + assert state_after_install is not None + assert state_after_install.state == "on" + assert state_after_install.attributes["title"] == "EmberZNet" + assert state_after_install.attributes["installed_version"] == "7.3.1.0" + assert state_after_install.attributes["latest_version"] == "7.4.4.0" + + +async def test_update_entity_state_restoration( + hass: HomeAssistant, update_config_entry: ConfigEntry +) -> None: + """Test the Hardware firmware update entity state restoration.""" + + mock_restore_cache_with_extra_data( + hass, + [ + ( + State(TEST_UPDATE_ENTITY_ID, "on"), + FirmwareUpdateExtraStoredData( + firmware_manifest=TEST_MANIFEST + ).as_dict(), + ) + ], + ) + + assert await hass.config_entries.async_setup(update_config_entry.entry_id) + await hass.async_block_till_done() + + # The state is correctly restored + state = hass.states.get(TEST_UPDATE_ENTITY_ID) + assert state is not None + assert state.state == "on" + assert state.attributes["title"] == "EmberZNet" + assert state.attributes["installed_version"] == "7.3.1.0" + assert state.attributes["latest_version"] == "7.4.4.0" + assert state.attributes["release_summary"] == ("Some release notes go here") + 
assert state.attributes["release_url"] == ("https://example.org/release_notes") + + +async def test_update_entity_firmware_missing_from_manifest( + hass: HomeAssistant, update_config_entry: ConfigEntry +) -> None: + """Test the Hardware firmware update entity handles missing firmware.""" + + mock_restore_cache_with_extra_data( + hass, + [ + ( + State(TEST_UPDATE_ENTITY_ID, "on"), + # Ensure the manifest does not contain our expected firmware type + FirmwareUpdateExtraStoredData( + firmware_manifest=dataclasses.replace(TEST_MANIFEST, firmwares=()) + ).as_dict(), + ) + ], + ) + + assert await hass.config_entries.async_setup(update_config_entry.entry_id) + await hass.async_block_till_done() + + # The state is restored, accounting for the missing firmware + state = hass.states.get(TEST_UPDATE_ENTITY_ID) + assert state is not None + assert state.state == "unknown" + assert state.attributes["title"] == "EmberZNet" + assert state.attributes["installed_version"] == "7.3.1.0" + assert state.attributes["latest_version"] is None + assert state.attributes["release_summary"] is None + assert state.attributes["release_url"] is None + + +async def test_update_entity_graceful_firmware_type_callback_errors( + hass: HomeAssistant, + update_config_entry: ConfigEntry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test firmware update entity handling of firmware type callback errors.""" + + session = async_get_clientsession(hass) + update_entity = MockFirmwareUpdateEntity( + device=TEST_DEVICE, + config_entry=update_config_entry, + update_coordinator=FirmwareUpdateCoordinator( + hass, + session, + TEST_FIRMWARE_RELEASES_URL, + ), + entity_description=TEST_FIRMWARE_ENTITY_DESCRIPTIONS[ApplicationType.EZSP], + ) + update_entity.hass = hass + await update_entity.async_added_to_hass() + + callback = Mock(side_effect=RuntimeError("Callback failed")) + unregister_callback = update_entity.add_firmware_type_changed_callback(callback) + + with caplog.at_level(logging.WARNING): + await async_notify_firmware_info( + hass, + "some_integration", + FirmwareInfo( + device=TEST_DEVICE, + firmware_type=ApplicationType.SPINEL, + firmware_version="SL-OPENTHREAD/2.4.4.0_GitHub-7074a43e4; EFR32; Oct 21 2024 14:40:57", + owners=[], + source="probe", + ), + ) + + unregister_callback() + assert "Failed to call firmware type changed callback" in caplog.text diff --git a/tests/components/homeassistant_hardware/test_util.py b/tests/components/homeassistant_hardware/test_util.py index b467380c431..1b7bfe4a8ac 100644 --- a/tests/components/homeassistant_hardware/test_util.py +++ b/tests/components/homeassistant_hardware/test_util.py @@ -205,6 +205,93 @@ async def test_owning_addon(hass: HomeAssistant) -> None: assert (await owning_addon.is_running(hass)) is False +async def test_owning_addon_temporarily_stop_info_error(hass: HomeAssistant) -> None: + """Test `OwningAddon` temporarily stopping with an info error.""" + + owning_addon = OwningAddon(slug="some-addon-slug") + mock_manager = AsyncMock() + mock_manager.async_get_addon_info.side_effect = AddonError() + + with patch( + "homeassistant.components.homeassistant_hardware.util.WaitingAddonManager", + return_value=mock_manager, + ): + async with owning_addon.temporarily_stop(hass): + pass + + # We never restart it + assert len(mock_manager.async_get_addon_info.mock_calls) == 1 + assert len(mock_manager.async_stop_addon.mock_calls) == 0 + assert len(mock_manager.async_wait_until_addon_state.mock_calls) == 0 + assert len(mock_manager.async_start_addon_waiting.mock_calls) == 0 + + 
+async def test_owning_addon_temporarily_stop_not_running(hass: HomeAssistant) -> None: + """Test `OwningAddon` temporarily stopping when the addon is not running.""" + + owning_addon = OwningAddon(slug="some-addon-slug") + + mock_manager = AsyncMock() + mock_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname="core_some_addon_slug", + options={}, + state=AddonState.NOT_RUNNING, + update_available=False, + version="1.0.0", + ) + + with patch( + "homeassistant.components.homeassistant_hardware.util.WaitingAddonManager", + return_value=mock_manager, + ): + async with owning_addon.temporarily_stop(hass): + pass + + # We never restart it + assert len(mock_manager.async_get_addon_info.mock_calls) == 1 + assert len(mock_manager.async_stop_addon.mock_calls) == 0 + assert len(mock_manager.async_wait_until_addon_state.mock_calls) == 0 + assert len(mock_manager.async_start_addon_waiting.mock_calls) == 0 + + +async def test_owning_addon_temporarily_stop(hass: HomeAssistant) -> None: + """Test `OwningAddon` temporarily stopping when the addon is running.""" + + owning_addon = OwningAddon(slug="some-addon-slug") + + mock_manager = AsyncMock() + mock_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname="core_some_addon_slug", + options={}, + state=AddonState.RUNNING, + update_available=False, + version="1.0.0", + ) + + mock_manager.async_stop_addon = AsyncMock() + mock_manager.async_wait_until_addon_state = AsyncMock() + mock_manager.async_start_addon_waiting = AsyncMock() + + # The error is propagated but it doesn't affect restarting the addon + with ( + patch( + "homeassistant.components.homeassistant_hardware.util.WaitingAddonManager", + return_value=mock_manager, + ), + pytest.raises(RuntimeError), + ): + async with owning_addon.temporarily_stop(hass): + raise RuntimeError("Some error") + + # We restart it + assert len(mock_manager.async_get_addon_info.mock_calls) == 1 + assert len(mock_manager.async_stop_addon.mock_calls) == 1 + assert len(mock_manager.async_wait_until_addon_state.mock_calls) == 1 + assert len(mock_manager.async_start_addon_waiting.mock_calls) == 1 + + async def test_owning_integration(hass: HomeAssistant) -> None: """Test `OwningIntegration`.""" config_entry = MockConfigEntry(domain="mock_domain", unique_id="some_unique_id") @@ -225,6 +312,67 @@ async def test_owning_integration(hass: HomeAssistant) -> None: assert (await owning_integration2.is_running(hass)) is False +async def test_owning_integration_temporarily_stop_missing_entry( + hass: HomeAssistant, +) -> None: + """Test temporarily stopping the integration when the config entry doesn't exist.""" + missing_integration = OwningIntegration(config_entry_id="missing_entry_id") + + with ( + patch.object(hass.config_entries, "async_unload") as mock_unload, + patch.object(hass.config_entries, "async_setup") as mock_setup, + ): + async with missing_integration.temporarily_stop(hass): + pass + + # Because there's no matching entry, no unload or setup calls are made + assert len(mock_unload.mock_calls) == 0 + assert len(mock_setup.mock_calls) == 0 + + +async def test_owning_integration_temporarily_stop_not_loaded( + hass: HomeAssistant, +) -> None: + """Test temporarily stopping the integration when the config entry is not loaded.""" + entry = MockConfigEntry(domain="test_domain") + entry.add_to_hass(hass) + entry.mock_state(hass, ConfigEntryState.NOT_LOADED) + + integration = OwningIntegration(config_entry_id=entry.entry_id) + + with ( + patch.object(hass.config_entries, 
"async_unload") as mock_unload, + patch.object(hass.config_entries, "async_setup") as mock_setup, + ): + async with integration.temporarily_stop(hass): + pass + + # Since the entry was not loaded, we never unload or re-setup + assert len(mock_unload.mock_calls) == 0 + assert len(mock_setup.mock_calls) == 0 + + +async def test_owning_integration_temporarily_stop_loaded(hass: HomeAssistant) -> None: + """Test temporarily stopping the integration when the config entry is loaded.""" + entry = MockConfigEntry(domain="test_domain") + entry.add_to_hass(hass) + entry.mock_state(hass, ConfigEntryState.LOADED) + + integration = OwningIntegration(config_entry_id=entry.entry_id) + + with ( + patch.object(hass.config_entries, "async_unload") as mock_unload, + patch.object(hass.config_entries, "async_setup") as mock_setup, + pytest.raises(RuntimeError), + ): + async with integration.temporarily_stop(hass): + raise RuntimeError("Some error during the temporary stop") + + # We expect one unload followed by one setup call + mock_unload.assert_called_once_with(entry.entry_id) + mock_setup.assert_called_once_with(entry.entry_id) + + async def test_firmware_info(hass: HomeAssistant) -> None: """Test `FirmwareInfo`.""" diff --git a/tests/components/homeassistant_sky_connect/common.py b/tests/components/homeassistant_sky_connect/common.py new file mode 100644 index 00000000000..335fd6d2e12 --- /dev/null +++ b/tests/components/homeassistant_sky_connect/common.py @@ -0,0 +1,21 @@ +"""Common constants for the SkyConnect integration tests.""" + +from homeassistant.helpers.service_info.usb import UsbServiceInfo + +USB_DATA_SKY = UsbServiceInfo( + device="/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_9e2adbd75b8beb119fe564a0f320645d-if00-port0", + vid="10C4", + pid="EA60", + serial_number="9e2adbd75b8beb119fe564a0f320645d", + manufacturer="Nabu Casa", + description="SkyConnect v1.0", +) + +USB_DATA_ZBT1 = UsbServiceInfo( + device="/dev/serial/by-id/usb-Nabu_Casa_Home_Assistant_Connect_ZBT-1_9e2adbd75b8beb119fe564a0f320645d-if00-port0", + vid="10C4", + pid="EA60", + serial_number="9e2adbd75b8beb119fe564a0f320645d", + manufacturer="Nabu Casa", + description="Home Assistant Connect ZBT-1", +) diff --git a/tests/components/homeassistant_sky_connect/test_config_flow.py b/tests/components/homeassistant_sky_connect/test_config_flow.py index d8542002ae8..44a5e0029c3 100644 --- a/tests/components/homeassistant_sky_connect/test_config_flow.py +++ b/tests/components/homeassistant_sky_connect/test_config_flow.py @@ -22,26 +22,10 @@ from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers.service_info.usb import UsbServiceInfo +from .common import USB_DATA_SKY, USB_DATA_ZBT1 + from tests.common import MockConfigEntry -USB_DATA_SKY = UsbServiceInfo( - device="/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_9e2adbd75b8beb119fe564a0f320645d-if00-port0", - vid="10C4", - pid="EA60", - serial_number="9e2adbd75b8beb119fe564a0f320645d", - manufacturer="Nabu Casa", - description="SkyConnect v1.0", -) - -USB_DATA_ZBT1 = UsbServiceInfo( - device="/dev/serial/by-id/usb-Nabu_Casa_Home_Assistant_Connect_ZBT-1_9e2adbd75b8beb119fe564a0f320645d-if00-port0", - vid="10C4", - pid="EA60", - serial_number="9e2adbd75b8beb119fe564a0f320645d", - manufacturer="Nabu Casa", - description="Home Assistant Connect ZBT-1", -) - @pytest.mark.parametrize( ("usb_data", "model"), @@ -76,7 +60,7 @@ async def test_config_flow( return_value=FirmwareInfo( device=usb_data.device, 
firmware_type=ApplicationType.EZSP, - firmware_version=None, + firmware_version="7.4.4.0 build 0", owners=[], source="probe", ), @@ -92,6 +76,7 @@ async def test_config_flow( config_entry = result["result"] assert config_entry.data == { "firmware": "ezsp", + "firmware_version": "7.4.4.0 build 0", "device": usb_data.device, "manufacturer": usb_data.manufacturer, "pid": usb_data.pid, @@ -161,7 +146,7 @@ async def test_options_flow( return_value=FirmwareInfo( device=usb_data.device, firmware_type=ApplicationType.EZSP, - firmware_version=None, + firmware_version="7.4.4.0 build 0", owners=[], source="probe", ), @@ -177,6 +162,7 @@ async def test_options_flow( assert config_entry.data == { "firmware": "ezsp", + "firmware_version": "7.4.4.0 build 0", "device": usb_data.device, "manufacturer": usb_data.manufacturer, "pid": usb_data.pid, diff --git a/tests/components/homeassistant_sky_connect/test_init.py b/tests/components/homeassistant_sky_connect/test_init.py index 8e90039a4fc..c467a9e0d60 100644 --- a/tests/components/homeassistant_sky_connect/test_init.py +++ b/tests/components/homeassistant_sky_connect/test_init.py @@ -44,7 +44,7 @@ async def test_config_entry_migration_v2(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(config_entry.entry_id) assert config_entry.version == 1 - assert config_entry.minor_version == 2 + assert config_entry.minor_version == 3 assert config_entry.data == { "description": "SkyConnect v1.0", "device": "/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_9e2adbd75b8beb119fe564a0f320645d-if00-port0", @@ -54,6 +54,7 @@ async def test_config_entry_migration_v2(hass: HomeAssistant) -> None: "manufacturer": "Nabu Casa", "product": "SkyConnect v1.0", # `description` has been copied to `product` "firmware": "spinel", # new key + "firmware_version": None, # new key } await hass.config_entries.async_unload(config_entry.entry_id) diff --git a/tests/components/homeassistant_sky_connect/test_update.py b/tests/components/homeassistant_sky_connect/test_update.py new file mode 100644 index 00000000000..9fb7528987e --- /dev/null +++ b/tests/components/homeassistant_sky_connect/test_update.py @@ -0,0 +1,86 @@ +"""Test SkyConnect firmware update entity.""" + +from homeassistant.components.homeassistant_hardware.helpers import ( + async_notify_firmware_info, +) +from homeassistant.components.homeassistant_hardware.util import ( + ApplicationType, + FirmwareInfo, +) +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from .common import USB_DATA_ZBT1 + +from tests.common import MockConfigEntry + +UPDATE_ENTITY_ID = ( + "update.homeassistant_sky_connect_9e2adbd75b8beb119fe564a0f320645d_firmware" +) + + +async def test_zbt1_update_entity(hass: HomeAssistant) -> None: + """Test the ZBT-1 firmware update entity.""" + await async_setup_component(hass, "homeassistant", {}) + + # Set up the ZBT-1 integration + zbt1_config_entry = MockConfigEntry( + domain="homeassistant_sky_connect", + data={ + "firmware": "ezsp", + "firmware_version": "7.3.1.0 build 0", + "device": USB_DATA_ZBT1.device, + "manufacturer": USB_DATA_ZBT1.manufacturer, + "pid": USB_DATA_ZBT1.pid, + "product": USB_DATA_ZBT1.description, + "serial_number": USB_DATA_ZBT1.serial_number, + "vid": USB_DATA_ZBT1.vid, + }, + version=1, + minor_version=3, + ) + zbt1_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(zbt1_config_entry.entry_id) + await hass.async_block_till_done() + + # Pretend ZHA loaded and notified hardware of the running 
firmware + await async_notify_firmware_info( + hass, + "zha", + FirmwareInfo( + device=USB_DATA_ZBT1.device, + firmware_type=ApplicationType.EZSP, + firmware_version="7.3.1.0 build 0", + owners=[], + source="zha", + ), + ) + await hass.async_block_till_done() + + state_ezsp = hass.states.get(UPDATE_ENTITY_ID) + assert state_ezsp.state == "unknown" + assert state_ezsp.attributes["title"] == "EmberZNet" + assert state_ezsp.attributes["installed_version"] == "7.3.1.0" + assert state_ezsp.attributes["latest_version"] is None + + # Now, have OTBR push some info + await async_notify_firmware_info( + hass, + "otbr", + FirmwareInfo( + device=USB_DATA_ZBT1.device, + firmware_type=ApplicationType.SPINEL, + firmware_version="SL-OPENTHREAD/2.4.4.0_GitHub-7074a43e4; EFR32; Oct 21 2024 14:40:57", + owners=[], + source="otbr", + ), + ) + await hass.async_block_till_done() + + # After the firmware update, the entity has the new version and the correct state + state_spinel = hass.states.get(UPDATE_ENTITY_ID) + assert state_spinel.state == "unknown" + assert state_spinel.attributes["title"] == "OpenThread RCP" + assert state_spinel.attributes["installed_version"] == "2.4.4.0" + assert state_spinel.attributes["latest_version"] is None diff --git a/tests/components/homeassistant_yellow/test_config_flow.py b/tests/components/homeassistant_yellow/test_config_flow.py index 78fd45c6b5b..46fec0a1f30 100644 --- a/tests/components/homeassistant_yellow/test_config_flow.py +++ b/tests/components/homeassistant_yellow/test_config_flow.py @@ -350,7 +350,7 @@ async def test_firmware_options_flow(hass: HomeAssistant) -> None: return_value=FirmwareInfo( device=RADIO_DEVICE, firmware_type=ApplicationType.EZSP, - firmware_version=None, + firmware_version="7.4.4.0 build 0", owners=[], source="probe", ), @@ -366,6 +366,7 @@ async def test_firmware_options_flow(hass: HomeAssistant) -> None: assert config_entry.data == { "firmware": "ezsp", + "firmware_version": "7.4.4.0 build 0", } diff --git a/tests/components/homeassistant_yellow/test_update.py b/tests/components/homeassistant_yellow/test_update.py new file mode 100644 index 00000000000..269ff2afc49 --- /dev/null +++ b/tests/components/homeassistant_yellow/test_update.py @@ -0,0 +1,89 @@ +"""Test Yellow firmware update entity.""" + +from unittest.mock import patch + +from homeassistant.components.homeassistant_hardware.helpers import ( + async_notify_firmware_info, +) +from homeassistant.components.homeassistant_hardware.util import ( + ApplicationType, + FirmwareInfo, +) +from homeassistant.components.homeassistant_yellow.const import RADIO_DEVICE +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry + +UPDATE_ENTITY_ID = "update.homeassistant_yellow_firmware" + + +async def test_yellow_update_entity(hass: HomeAssistant) -> None: + """Test the Yellow firmware update entity.""" + await async_setup_component(hass, "homeassistant", {}) + + # Set up the Yellow integration + yellow_config_entry = MockConfigEntry( + domain="homeassistant_yellow", + data={ + "firmware": "ezsp", + "firmware_version": "7.3.1.0 build 0", + "device": RADIO_DEVICE, + }, + version=1, + minor_version=3, + ) + yellow_config_entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.homeassistant_yellow.is_hassio", return_value=True + ), + patch( + "homeassistant.components.homeassistant_yellow.get_os_info", + return_value={"board": "yellow"}, + ), + ): + assert await 
hass.config_entries.async_setup(yellow_config_entry.entry_id) + await hass.async_block_till_done() + + # Pretend ZHA loaded and notified hardware of the running firmware + await async_notify_firmware_info( + hass, + "zha", + FirmwareInfo( + device=RADIO_DEVICE, + firmware_type=ApplicationType.EZSP, + firmware_version="7.3.1.0 build 0", + owners=[], + source="zha", + ), + ) + await hass.async_block_till_done() + + state_ezsp = hass.states.get(UPDATE_ENTITY_ID) + assert state_ezsp.state == "unknown" + assert state_ezsp.attributes["title"] == "EmberZNet" + assert state_ezsp.attributes["installed_version"] == "7.3.1.0" + assert state_ezsp.attributes["latest_version"] is None + + # Now, have OTBR push some info + await async_notify_firmware_info( + hass, + "otbr", + FirmwareInfo( + device=RADIO_DEVICE, + firmware_type=ApplicationType.SPINEL, + firmware_version="SL-OPENTHREAD/2.4.4.0_GitHub-7074a43e4; EFR32; Oct 21 2024 14:40:57", + owners=[], + source="otbr", + ), + ) + await hass.async_block_till_done() + + # After the firmware update, the entity has the new version and the correct state + state_spinel = hass.states.get(UPDATE_ENTITY_ID) + assert state_spinel.state == "unknown" + assert state_spinel.attributes["title"] == "OpenThread RCP" + assert state_spinel.attributes["installed_version"] == "2.4.4.0" + assert state_spinel.attributes["latest_version"] is None diff --git a/tests/components/homee/fixtures/binary_sensors.json b/tests/components/homee/fixtures/binary_sensors.json new file mode 100644 index 00000000000..5ced5dc51da --- /dev/null +++ b/tests/components/homee/fixtures/binary_sensors.json @@ -0,0 +1,891 @@ +{ + "id": 1, + "name": "Test Binary Sensor", + "profile": 4026, + "image": "default", + "favorite": 0, + "order": 20, + "protocol": 1, + "routing": 0, + "state": 1, + "state_changed": 1709379826, + "added": 1676199446, + "history": 1, + "cube_type": 1, + "note": "", + "services": 5, + "phonetic_name": "", + "owner": 2, + "security": 0, + "attributes": [ + { + "id": 1, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 69, + "state": 1, + "last_changed": 1706461181, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 2, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 17, + "state": 1, + "last_changed": 1691668428, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 3, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 132, + "state": 1, + "last_changed": 1691668428, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 4, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 1.0, + "target_value": 1.0, + 
"last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 228, + "state": 1, + "last_changed": 1691668428, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 5, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 1.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 12, + "state": 1, + "last_changed": 1699456267, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 6, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 52, + "state": 1, + "last_changed": 1694176210, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 7, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 68, + "state": 1, + "last_changed": 1694176210, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 8, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 1.0, + "target_value": 1.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 139, + "state": 1, + "last_changed": 1650402359, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 9, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 1.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 232, + "state": 1, + "last_changed": 1711897362, + "changed_by": 4, + "changed_by_id": 5, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 35, + "week": 5, + "month": 1, + "stepped": true + } + } + }, + { + "id": 10, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 1.0, + "target_value": 1.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 54, + "state": 1, + "last_changed": 1650402359, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 11, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 70, + "state": 1, + "last_changed": 1738231378, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 4, + "data": "", + "name": "", + 
"options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 12, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 78, + "state": 1, + "last_changed": 1738231378, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 4, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 13, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 77, + "state": 1, + "last_changed": 1735964135, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 14, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 1.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 25, + "state": 1, + "last_changed": 1709933563, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 35, + "week": 5, + "month": 1, + "stepped": true + } + } + }, + { + "id": 15, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 1.0, + "target_value": 1.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 330, + "state": 1, + "last_changed": 1709933563, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 35, + "week": 5, + "month": 1, + "stepped": true + } + } + }, + { + "id": 16, + "node_id": 1, + "instance": 2, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 1, + "state": 1, + "last_changed": 1694024544, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "can_observe": [300], + "automations": ["reset"], + "history": { + "day": 35, + "week": 5, + "month": 1, + "stepped": true + } + } + }, + { + "id": 17, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 14, + "state": 1, + "last_changed": 1739320320, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "history": { + "day": 35, + "week": 5, + "month": 1, + "stepped": true + } + } + }, + { + "id": 18, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 143, + "state": 1, + "last_changed": 1694992768, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 19, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, 
+ "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 140, + "state": 1, + "last_changed": 1718900928, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 20, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 76, + "state": 1, + "last_changed": 1718900928, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 21, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 182, + "state": 1, + "last_changed": 1718900928, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 22, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 101, + "state": 1, + "last_changed": 1700056646, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "history": { + "day": 35, + "week": 5, + "month": 1, + "stepped": true + } + } + }, + { + "id": 23, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "n%2Fa", + "step_value": 1.0, + "editable": 0, + "type": 289, + "state": 1, + "last_changed": 1736106312, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 24, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 2, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 1.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 16, + "state": 1, + "last_changed": 1616314530, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 25, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 2, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 1.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 181, + "state": 1, + "last_changed": 1616314530, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 26, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 138, + "state": 1, + "last_changed": 1700747644, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + 
"options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 27, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 30, + "state": 1, + "last_changed": 1709933563, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 28, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 141, + "state": 1, + "last_changed": 1700747644, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 29, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 80, + "state": 1, + "last_changed": 1700747644, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + } + ] +} diff --git a/tests/components/homee/fixtures/lock.json b/tests/components/homee/fixtures/lock.json new file mode 100644 index 00000000000..79fd53e0311 --- /dev/null +++ b/tests/components/homee/fixtures/lock.json @@ -0,0 +1,52 @@ +{ + "id": 1, + "name": "Test Lock", + "profile": 2007, + "image": "default", + "favorite": 0, + "order": 31, + "protocol": 1, + "routing": 0, + "state": 1, + "state_changed": 1711799526, + "added": 1645036891, + "history": 1, + "cube_type": 1, + "note": "", + "services": 3, + "phonetic_name": "", + "owner": 2, + "security": 0, + "attributes": [ + { + "id": 1, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 1.0, + "unit": "", + "step_value": 1.0, + "editable": 1, + "type": 232, + "state": 1, + "last_changed": 1711897362, + "changed_by": 4, + "changed_by_id": 5, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["toggle"], + "history": { + "day": 35, + "week": 5, + "month": 1, + "stepped": true + } + } + } + ] +} diff --git a/tests/components/homee/snapshots/test_binary_sensor.ambr b/tests/components/homee/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..4926c048f5b --- /dev/null +++ b/tests/components/homee/snapshots/test_binary_sensor.ambr @@ -0,0 +1,1392 @@ +# serializer version: 1 +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 
'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery', + 'unique_id': '00055511EECC-1-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Test Binary Sensor Battery', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_blackout-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_blackout', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Blackout', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'blackout_alarm', + 'unique_id': '00055511EECC-1-2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_blackout-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Binary Sensor Blackout', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_blackout', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_carbon_dioxide-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_binary_sensor_carbon_dioxide', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Carbon dioxide', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'carbon_dioxide', + 'unique_id': '00055511EECC-1-4', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_carbon_dioxide-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Binary Sensor Carbon dioxide', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_carbon_dioxide', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_carbon_monoxide-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_binary_sensor_carbon_monoxide', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 
'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Carbon monoxide', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'carbon_monoxide', + 'unique_id': '00055511EECC-1-3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_carbon_monoxide-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'carbon_monoxide', + 'friendly_name': 'Test Binary Sensor Carbon monoxide', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_carbon_monoxide', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_flood-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_binary_sensor_flood', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Flood', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'flood', + 'unique_id': '00055511EECC-1-5', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_flood-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'moisture', + 'friendly_name': 'Test Binary Sensor Flood', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_flood', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_high_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_high_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'High temperature', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'high_temperature', + 'unique_id': '00055511EECC-1-6', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_high_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'heat', + 'friendly_name': 'Test Binary Sensor High temperature', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_high_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_leak-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 
'binary_sensor.test_binary_sensor_leak', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Leak', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'leak_alarm', + 'unique_id': '00055511EECC-1-7', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_leak-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Binary Sensor Leak', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_leak', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_load-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_load', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Load', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'load_alarm', + 'unique_id': '00055511EECC-1-8', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_load-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Binary Sensor Load', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_load', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_binary_sensor_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Lock', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lock', + 'unique_id': '00055511EECC-1-9', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'lock', + 'friendly_name': 'Test Binary Sensor Lock', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_low_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 
'binary_sensor.test_binary_sensor_low_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Low temperature', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'low_temperature', + 'unique_id': '00055511EECC-1-10', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_low_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'cold', + 'friendly_name': 'Test Binary Sensor Low temperature', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_low_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_malfunction-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_malfunction', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Malfunction', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'malfunction', + 'unique_id': '00055511EECC-1-11', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_malfunction-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Binary Sensor Malfunction', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_malfunction', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_maximum_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_maximum_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Maximum level', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'maximum', + 'unique_id': '00055511EECC-1-12', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_maximum_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Binary Sensor Maximum level', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_maximum_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_minimum_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 
'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_minimum_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Minimum level', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'minimum', + 'unique_id': '00055511EECC-1-13', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_minimum_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Binary Sensor Minimum level', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_minimum_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_motion-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_binary_sensor_motion', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Motion', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'motion', + 'unique_id': '00055511EECC-1-14', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_motion-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'Test Binary Sensor Motion', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_motion', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_motor_blocked-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_motor_blocked', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Motor blocked', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'motor_blocked', + 'unique_id': '00055511EECC-1-15', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_motor_blocked-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Binary Sensor Motor blocked', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_motor_blocked', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_opening-entry] + EntityRegistryEntrySnapshot({ + 
'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_binary_sensor_opening', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Opening', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'opening', + 'unique_id': '00055511EECC-1-17', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_opening-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'opening', + 'friendly_name': 'Test Binary Sensor Opening', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_opening', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_overcurrent-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_overcurrent', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Overcurrent', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'overcurrent', + 'unique_id': '00055511EECC-1-18', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_overcurrent-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Binary Sensor Overcurrent', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_overcurrent', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_overload-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_overload', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Overload', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'overload', + 'unique_id': '00055511EECC-1-19', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_overload-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Binary Sensor Overload', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_overload', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: 
test_sensor_snapshot[binary_sensor.test_binary_sensor_plug-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_binary_sensor_plug', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Plug', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'plug', + 'unique_id': '00055511EECC-1-16', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_plug-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'plug', + 'friendly_name': 'Test Binary Sensor Plug', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_plug', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power', + 'unique_id': '00055511EECC-1-21', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Test Binary Sensor Power', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_presence-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_binary_sensor_presence', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Presence', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'presence', + 'unique_id': '00055511EECC-1-20', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_presence-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'presence', + 'friendly_name': 'Test Binary Sensor Presence', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_presence', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: 
test_sensor_snapshot[binary_sensor.test_binary_sensor_rain-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_binary_sensor_rain', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rain', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'rain', + 'unique_id': '00055511EECC-1-22', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_rain-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'moisture', + 'friendly_name': 'Test Binary Sensor Rain', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_rain', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_replace_filter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_replace_filter', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Replace filter', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'replace_filter', + 'unique_id': '00055511EECC-1-23', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_replace_filter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Binary Sensor Replace filter', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_replace_filter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_smoke-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_binary_sensor_smoke', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Smoke', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'smoke', + 'unique_id': '00055511EECC-1-24', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_smoke-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'smoke', + 'friendly_name': 'Test Binary Sensor Smoke', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_smoke', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_storage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_storage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Storage', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage', + 'unique_id': '00055511EECC-1-25', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_storage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Binary Sensor Storage', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_storage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_surge-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_surge', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Surge', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'surge', + 'unique_id': '00055511EECC-1-26', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_surge-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Binary Sensor Surge', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_surge', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_tamper-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_tamper', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Tamper', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'tamper', + 'unique_id': '00055511EECC-1-27', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_tamper-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'tamper', + 'friendly_name': 'Test Binary Sensor Tamper', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_tamper', + 'last_changed': , + 'last_reported': , 
+ 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_voltage_drop-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_voltage_drop', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage drop', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_drop', + 'unique_id': '00055511EECC-1-28', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_voltage_drop-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Binary Sensor Voltage drop', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_voltage_drop', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_water-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_water', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Water', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'water', + 'unique_id': '00055511EECC-1-29', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_water-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'moisture', + 'friendly_name': 'Test Binary Sensor Water', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_water', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/homee/snapshots/test_lock.ambr b/tests/components/homee/snapshots/test_lock.ambr new file mode 100644 index 00000000000..d055039cca4 --- /dev/null +++ b/tests/components/homee/snapshots/test_lock.ambr @@ -0,0 +1,50 @@ +# serializer version: 1 +# name: test_lock_snapshot[lock.test_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'lock', + 'entity_category': None, + 'entity_id': 'lock.test_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00055511EECC-1-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_lock_snapshot[lock.test_lock-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'changed_by': 'unknown-5', + 'friendly_name': 'Test Lock', + 'supported_features': , + }), + 'context': , + 'entity_id': 'lock.test_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unlocked', + }) +# --- diff --git a/tests/components/homee/test_binary_sensor.py b/tests/components/homee/test_binary_sensor.py new file mode 100644 index 00000000000..50662616379 --- /dev/null +++ b/tests/components/homee/test_binary_sensor.py @@ -0,0 +1,29 @@ +"""Test homee binary sensors.""" + +from unittest.mock import MagicMock, patch + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import build_mock_node, setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_sensor_snapshot( + hass: HomeAssistant, + mock_homee: MagicMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the multisensor snapshot.""" + mock_homee.nodes = [build_mock_node("binary_sensors.json")] + mock_homee.get_node_by_id.return_value = mock_homee.nodes[0] + with patch("homeassistant.components.homee.PLATFORMS", [Platform.BINARY_SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/homee/test_lock.py b/tests/components/homee/test_lock.py new file mode 100644 index 00000000000..3e6ff3f8ec6 --- /dev/null +++ b/tests/components/homee/test_lock.py @@ -0,0 +1,125 @@ +"""Test Homee locks.""" + +from unittest.mock import MagicMock, patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.lock import ( + DOMAIN as LOCK_DOMAIN, + SERVICE_LOCK, + SERVICE_UNLOCK, + LockState, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import build_mock_node, setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def setup_lock( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_homee: MagicMock +) -> None: + """Set up the lock integration for testing.""" + mock_homee.nodes = [build_mock_node("lock.json")] + mock_homee.get_node_by_id.return_value = mock_homee.nodes[0] + await setup_integration(hass, mock_config_entry) + + +@pytest.mark.parametrize( + ("service", "target_value"), + [ + (SERVICE_LOCK, 1), + (SERVICE_UNLOCK, 0), + ], +) +async def test_lock_services( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_homee: MagicMock, + service: str, + target_value: int, +) -> None: + """Test lock services.""" + await setup_lock(hass, mock_config_entry, mock_homee) + + await hass.services.async_call( + LOCK_DOMAIN, + service, + {ATTR_ENTITY_ID: "lock.test_lock"}, + ) + mock_homee.set_value.assert_called_once_with(1, 1, target_value) + + +@pytest.mark.parametrize( + ("target_value", "current_value", "expected"), + [ + (1.0, 1.0, LockState.LOCKED), + (0.0, 0.0, LockState.UNLOCKED), + (1.0, 0.0, LockState.LOCKING), + (0.0, 1.0, LockState.UNLOCKING), + ], +) +async def test_lock_state( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_homee: MagicMock, + target_value: float, + current_value: float, + expected: LockState, +) -> None: + """Test lock state.""" + mock_homee.nodes = [build_mock_node("lock.json")] + mock_homee.get_node_by_id.return_value = mock_homee.nodes[0] + attribute = mock_homee.nodes[0].attributes[0] + attribute.target_value = target_value + attribute.current_value = current_value + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("lock.test_lock").state == expected + + +@pytest.mark.parametrize( + ("attr_changed_by", "changed_by_id", "expected"), + [ + (1, 0, "itself-0"), + (2, 1, "user-testuser"), + (3, 54, "homeegram-54"), + (6, 0, "ai-0"), + ], +) +async def test_lock_changed_by( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_homee: MagicMock, + attr_changed_by: int, + changed_by_id: int, + expected: str, +) -> None: + """Test the lock's changed_by attribute.""" + mock_homee.nodes = [build_mock_node("lock.json")] + mock_homee.get_node_by_id.return_value = mock_homee.nodes[0] + mock_homee.get_user_by_id.return_value = MagicMock(username="testuser") + attribute = mock_homee.nodes[0].attributes[0] + attribute.changed_by = attr_changed_by + attribute.changed_by_id = changed_by_id + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("lock.test_lock").attributes["changed_by"] == expected + + +async def test_lock_snapshot( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_homee: MagicMock, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the lock snapshots.""" + with patch("homeassistant.components.homee.PLATFORMS", [Platform.LOCK]): + await setup_lock(hass, mock_config_entry, mock_homee) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/homekit/test_type_switches.py b/tests/components/homekit/test_type_switches.py index 141141e7f15..6a30877a795 100644 --- a/tests/components/homekit/test_type_switches.py +++ b/tests/components/homekit/test_type_switches.py @@ -12,6 +12,7 @@ from homeassistant.components.homekit.const import ( TYPE_VALVE, ) from homeassistant.components.homekit.type_switches import ( + LawnMower, Outlet, SelectSwitch, Switch, @@ 
-19,6 +20,13 @@ from homeassistant.components.homekit.type_switches import ( Valve, ValveSwitch, ) +from homeassistant.components.lawn_mower import ( + DOMAIN as LAWN_MOWER_DOMAIN, + SERVICE_DOCK, + SERVICE_START_MOWING, + LawnMowerActivity, + LawnMowerEntityFeature, +) from homeassistant.components.select import ATTR_OPTIONS from homeassistant.components.vacuum import ( DOMAIN as VACUUM_DOMAIN, @@ -383,6 +391,73 @@ async def test_vacuum_set_state_without_returnhome_and_start_support( assert events[-1].data[ATTR_VALUE] is None +async def test_lawn_mower_set_state( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: + """Test if Lawn mower accessory and HA are updated accordingly.""" + entity_id = "lawn_mower.mower" + + hass.states.async_set( + entity_id, + None, + { + ATTR_SUPPORTED_FEATURES: LawnMowerEntityFeature.DOCK + | LawnMowerEntityFeature.START_MOWING + }, + ) + await hass.async_block_till_done() + + acc = LawnMower(hass, hk_driver, "LawnMower", entity_id, 2, None) + acc.run() + await hass.async_block_till_done() + assert acc.aid == 2 + assert acc.category == 8 # Switch + + assert acc.char_on.value == 0 + + hass.states.async_set( + entity_id, + LawnMowerActivity.MOWING, + { + ATTR_SUPPORTED_FEATURES: LawnMowerEntityFeature.DOCK + | LawnMowerEntityFeature.START_MOWING + }, + ) + await hass.async_block_till_done() + assert acc.char_on.value == 1 + + hass.states.async_set( + entity_id, + LawnMowerActivity.DOCKED, + { + ATTR_SUPPORTED_FEATURES: LawnMowerEntityFeature.DOCK + | LawnMowerEntityFeature.START_MOWING + }, + ) + await hass.async_block_till_done() + assert acc.char_on.value == 0 + + # Set from HomeKit + call_turn_on = async_mock_service(hass, LAWN_MOWER_DOMAIN, SERVICE_START_MOWING) + call_turn_off = async_mock_service(hass, LAWN_MOWER_DOMAIN, SERVICE_DOCK) + + acc.char_on.client_update_value(1) + await hass.async_block_till_done() + assert acc.char_on.value == 1 + assert call_turn_on + assert call_turn_on[0].data[ATTR_ENTITY_ID] == entity_id + assert len(events) == 1 + assert events[-1].data[ATTR_VALUE] is None + + acc.char_on.client_update_value(0) + await hass.async_block_till_done() + assert acc.char_on.value == 0 + assert call_turn_off + assert call_turn_off[0].data[ATTR_ENTITY_ID] == entity_id + assert len(events) == 2 + assert events[-1].data[ATTR_VALUE] is None + + async def test_reset_switch( hass: HomeAssistant, hk_driver, events: list[Event] ) -> None: diff --git a/tests/components/homekit/test_type_thermostats.py b/tests/components/homekit/test_type_thermostats.py index fc4cfa78ca4..69c347ef55a 100644 --- a/tests/components/homekit/test_type_thermostats.py +++ b/tests/components/homekit/test_type_thermostats.py @@ -69,7 +69,6 @@ from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, - CONF_TEMPERATURE_UNIT, EVENT_HOMEASSISTANT_START, STATE_UNAVAILABLE, STATE_UNKNOWN, @@ -77,6 +76,7 @@ from homeassistant.const import ( ) from homeassistant.core import CoreState, Event, HomeAssistant from homeassistant.helpers import entity_registry as er +from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM from tests.common import async_mock_service @@ -858,6 +858,7 @@ async def test_thermostat_fahrenheit( ) -> None: """Test if accessory and HA are updated accordingly.""" entity_id = "climate.test" + hass.config.units = US_CUSTOMARY_SYSTEM # support_ = True hass.states.async_set( @@ -869,10 +870,7 @@ async def test_thermostat_fahrenheit( }, ) await hass.async_block_till_done() - with patch.object( - 
hass.config.units, CONF_TEMPERATURE_UNIT, new=UnitOfTemperature.FAHRENHEIT - ): - acc = Thermostat(hass, hk_driver, "Climate", entity_id, 1, None) + acc = Thermostat(hass, hk_driver, "Climate", entity_id, 1, None) hk_driver.add_accessory(acc) acc.run() await hass.async_block_till_done() @@ -1786,13 +1784,11 @@ async def test_water_heater_fahrenheit( ) -> None: """Test if accessory and HA are update accordingly.""" entity_id = "water_heater.test" + hass.config.units = US_CUSTOMARY_SYSTEM hass.states.async_set(entity_id, HVACMode.HEAT) await hass.async_block_till_done() - with patch.object( - hass.config.units, CONF_TEMPERATURE_UNIT, new=UnitOfTemperature.FAHRENHEIT - ): - acc = WaterHeater(hass, hk_driver, "WaterHeater", entity_id, 2, None) + acc = WaterHeater(hass, hk_driver, "WaterHeater", entity_id, 2, None) acc.run() await hass.async_block_till_done() diff --git a/tests/components/homekit_controller/snapshots/test_init.ambr b/tests/components/homekit_controller/snapshots/test_init.ambr index a41964d98cc..62b53df33f2 100644 --- a/tests/components/homekit_controller/snapshots/test_init.ambr +++ b/tests/components/homekit_controller/snapshots/test_init.ambr @@ -14352,7 +14352,7 @@ 'original_name': 'LG webOS TV AF80', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_1_48', 'unit_of_measurement': None, @@ -14371,7 +14371,7 @@ 'AV', 'HDMI 4', ]), - 'supported_features': , + 'supported_features': , }), 'entity_id': 'media_player.lg_webos_tv_af80', 'state': 'on', diff --git a/tests/components/homekit_controller/specific_devices/test_lg_tv.py b/tests/components/homekit_controller/specific_devices/test_lg_tv.py new file mode 100644 index 00000000000..48d1fc3ebdc --- /dev/null +++ b/tests/components/homekit_controller/specific_devices/test_lg_tv.py @@ -0,0 +1,56 @@ +"""Test against characteristics captured from an LG TV.""" + +from homeassistant.components.media_player import ( + ATTR_INPUT_SOURCE_LIST, + MediaPlayerEntityFeature, +) +from homeassistant.const import ATTR_SUPPORTED_FEATURES, STATE_ON +from homeassistant.core import HomeAssistant + +from ..common import ( + HUB_TEST_ACCESSORY_ID, + DeviceTestInfo, + assert_devices_and_entities_created, + setup_accessories_from_file, + setup_test_accessories, +) + + +async def test_lg_tv_setup(hass: HomeAssistant) -> None: + """Test that a LG TV can be correctly setup in HA.""" + accessories = await setup_accessories_from_file(hass, "lg_tv.json") + await setup_test_accessories(hass, accessories) + + await assert_devices_and_entities_created( + hass, + DeviceTestInfo( + unique_id=HUB_TEST_ACCESSORY_ID, + name="LG webOS TV AF80", + model="OLED55B9PUA", + manufacturer="LG Electronics", + sw_version="04.71.04", + hw_version="1", + serial_number="A0000A000000000A", + devices=[], + entities=[], + ), + ) + + state = hass.states.get("media_player.lg_webos_tv_af80") + assert state is not None + assert state.state == STATE_ON + assert state.attributes[ATTR_INPUT_SOURCE_LIST] == [ + "AirPlay", + "Live TV", + "HDMI 1", + "Sony", + "Apple", + "AV", + "HDMI 4", + ] + features = state.attributes[ATTR_SUPPORTED_FEATURES] + assert features & MediaPlayerEntityFeature.TURN_ON + assert features & MediaPlayerEntityFeature.TURN_OFF + assert features & MediaPlayerEntityFeature.SELECT_SOURCE + assert features & MediaPlayerEntityFeature.PLAY + assert features & MediaPlayerEntityFeature.PAUSE diff --git 
a/tests/components/homekit_controller/test_media_player.py b/tests/components/homekit_controller/test_media_player.py index d1d280ef265..e00dde92a81 100644 --- a/tests/components/homekit_controller/test_media_player.py +++ b/tests/components/homekit_controller/test_media_player.py @@ -10,6 +10,11 @@ from aiohomekit.model.characteristics import ( from aiohomekit.model.services import Service, ServicesTypes import pytest +from homeassistant.components.media_player import ( + DOMAIN as MEDIA_PLAYER_DOMAIN, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, +) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -408,3 +413,57 @@ async def test_migrate_unique_id( entity_registry.async_get(media_player_entry.entity_id).unique_id == f"00:00:00:00:00:00_{aid}_8" ) + + +async def test_turn_on(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> None: + """Test that we can turn on a media player.""" + helper = await setup_test_component( + hass, get_next_aid(), create_tv_service_with_target_media_state + ) + + await helper.async_update( + ServicesTypes.TELEVISION, + { + CharacteristicsTypes.CURRENT_MEDIA_STATE: 0, + }, + ) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_TURN_ON, + {"entity_id": "media_player.testdevice"}, + blocking=True, + ) + helper.async_assert_service_values( + ServicesTypes.TELEVISION, + { + CharacteristicsTypes.ACTIVE: 1, + }, + ) + + +async def test_turn_off(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> None: + """Test that we can turn off a media player.""" + helper = await setup_test_component( + hass, get_next_aid(), create_tv_service_with_target_media_state + ) + + await helper.async_update( + ServicesTypes.TELEVISION, + { + CharacteristicsTypes.CURRENT_MEDIA_STATE: 0, + }, + ) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_TURN_OFF, + {"entity_id": "media_player.testdevice"}, + blocking=True, + ) + helper.async_assert_service_values( + ServicesTypes.TELEVISION, + { + CharacteristicsTypes.ACTIVE: 0, + }, + ) diff --git a/tests/components/hue/test_light_v2.py b/tests/components/hue/test_light_v2.py index c831d40d261..3d323d4d31c 100644 --- a/tests/components/hue/test_light_v2.py +++ b/tests/components/hue/test_light_v2.py @@ -42,8 +42,8 @@ async def test_lights( assert light_1.attributes["min_mireds"] == 153 assert light_1.attributes["max_mireds"] == 500 assert light_1.attributes["dynamics"] == "dynamic_palette" - assert light_1.attributes["effect_list"] == ["None", "candle", "fire"] - assert light_1.attributes["effect"] == "None" + assert light_1.attributes["effect_list"] == ["off", "candle", "fire"] + assert light_1.attributes["effect"] == "off" # test light which supports color temperature only light_2 = hass.states.get("light.hue_light_with_color_temperature_only") @@ -57,7 +57,7 @@ async def test_lights( assert light_2.attributes["min_mireds"] == 153 assert light_2.attributes["max_mireds"] == 454 assert light_2.attributes["dynamics"] == "none" - assert light_2.attributes["effect_list"] == ["None", "candle", "sunrise"] + assert light_2.attributes["effect_list"] == ["off", "candle", "sunrise"] # test light which supports color only light_3 = hass.states.get("light.hue_light_with_color_only") @@ -201,7 +201,7 @@ async def test_light_turn_on_service( await hass.services.async_call( "light", "turn_on", - {"entity_id": test_light_id, "effect": "None"}, + {"entity_id": test_light_id, "effect": "off"}, blocking=True, ) assert len(mock_bridge_v2.mock_requests) == 8 @@ -216,14 
+216,14 @@ async def test_light_turn_on_service( await hass.async_block_till_done() test_light = hass.states.get(test_light_id) assert test_light is not None - assert test_light.attributes["effect"] == "None" + assert test_light.attributes["effect"] == "off" # test turn on with useless effect # it should send a effect in the request if the device has no effect active await hass.services.async_call( "light", "turn_on", - {"entity_id": test_light_id, "effect": "None"}, + {"entity_id": test_light_id, "effect": "off"}, blocking=True, ) assert len(mock_bridge_v2.mock_requests) == 9 diff --git a/tests/components/husqvarna_automower/fixtures/mower.json b/tests/components/husqvarna_automower/fixtures/mower.json index 8ab2f96e42f..06e11ec1252 100644 --- a/tests/components/husqvarna_automower/fixtures/mower.json +++ b/tests/components/husqvarna_automower/fixtures/mower.json @@ -176,13 +176,15 @@ ], "statistics": { "cuttingBladeUsageTime": 123, + "downTime": 3600, "numberOfChargingCycles": 1380, "numberOfCollisions": 11396, "totalChargingTime": 4334400, "totalCuttingTime": 4194000, "totalDriveDistance": 1780272, "totalRunningTime": 4564800, - "totalSearchingTime": 370800 + "totalSearchingTime": 370800, + "upTime": 7200 }, "stayOutZones": { "dirty": false, diff --git a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr index 2dab82451a6..d5546b0d2af 100644 --- a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr @@ -95,6 +95,7 @@ }), 'statistics': dict({ 'cutting_blade_usage_time': 123, + 'downtime': 3600, 'number_of_charging_cycles': 1380, 'number_of_collisions': 11396, 'total_charging_time': 4334400, @@ -102,6 +103,7 @@ 'total_drive_distance': 1780272, 'total_running_time': 4564800, 'total_searching_time': 370800, + 'uptime': 7200, }), 'stay_out_zones': dict({ 'dirty': False, diff --git a/tests/components/husqvarna_automower/snapshots/test_sensor.ambr b/tests/components/husqvarna_automower/snapshots/test_sensor.ambr index 02a64718276..92320de6fdb 100644 --- a/tests/components/husqvarna_automower/snapshots/test_sensor.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_sensor.ambr @@ -106,6 +106,64 @@ 'state': '0.034', }) # --- +# name: test_sensor_snapshot[sensor.test_mower_1_downtime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_mower_1_downtime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Downtime', + 'platform': 'husqvarna_automower', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'downtime', + 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_downtime', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_snapshot[sensor.test_mower_1_downtime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Test Mower 1 Downtime', + 'state_class': , + 
'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_mower_1_downtime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.0', + }) +# --- # name: test_sensor_snapshot[sensor.test_mower_1_error-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1208,6 +1266,64 @@ 'state': '103.000', }) # --- +# name: test_sensor_snapshot[sensor.test_mower_1_uptime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_mower_1_uptime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Uptime', + 'platform': 'husqvarna_automower', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'uptime', + 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_uptime', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_snapshot[sensor.test_mower_1_uptime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Test Mower 1 Uptime', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_mower_1_uptime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.0', + }) +# --- # name: test_sensor_snapshot[sensor.test_mower_1_work_area-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/husqvarna_automower/test_number.py b/tests/components/husqvarna_automower/test_number.py index 55bf5dda7eb..814846ae1c6 100644 --- a/tests/components/husqvarna_automower/test_number.py +++ b/tests/components/husqvarna_automower/test_number.py @@ -79,7 +79,7 @@ async def test_number_workarea_commands( freezer.tick(timedelta(seconds=EXECUTION_TIME_DELAY)) async_fire_time_changed(hass) await hass.async_block_till_done() - mocked_method.assert_called_once_with(TEST_MOWER_ID, 75, 123456) + mocked_method.assert_called_once_with(TEST_MOWER_ID, 123456, cutting_height=75) state = hass.states.get(entity_id) assert state.state is not None assert state.state == "75" diff --git a/tests/components/improv_ble/test_config_flow.py b/tests/components/improv_ble/test_config_flow.py index 4536c64349c..9d883502d28 100644 --- a/tests/components/improv_ble/test_config_flow.py +++ b/tests/components/improv_ble/test_config_flow.py @@ -10,6 +10,7 @@ import pytest from homeassistant import config_entries from homeassistant.components.bluetooth import BluetoothChange from homeassistant.components.improv_ble.const import DOMAIN +from homeassistant.config_entries import SOURCE_IGNORE from homeassistant.const import CONF_ADDRESS from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResult, FlowResultType @@ -21,6 +22,8 @@ from . 
import ( PROVISIONED_IMPROV_BLE_DISCOVERY_INFO, ) +from tests.common import MockConfigEntry + IMPROV_BLE = "homeassistant.components.improv_ble" @@ -118,6 +121,32 @@ async def test_async_step_user_takes_precedence_over_discovery( assert not hass.config_entries.flow.async_progress(DOMAIN) +async def test_user_setup_removes_ignored_entry(hass: HomeAssistant) -> None: + """Test the user initiated form can replace an ignored device.""" + ignored_entry = MockConfigEntry( + domain=DOMAIN, + unique_id=IMPROV_BLE_DISCOVERY_INFO.address, + source=SOURCE_IGNORE, + ) + ignored_entry.add_to_hass(hass) + with patch( + f"{IMPROV_BLE}.config_flow.bluetooth.async_discovered_service_info", + return_value=[NOT_IMPROV_BLE_DISCOVERY_INFO, IMPROV_BLE_DISCOVERY_INFO], + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + await _test_common_success_wo_identify( + hass, result, IMPROV_BLE_DISCOVERY_INFO.address + ) + # Check the ignored entry is removed + assert not hass.config_entries.async_entries(DOMAIN) + + async def test_bluetooth_step_provisioned_device(hass: HomeAssistant) -> None: """Test bluetooth step when device is already provisioned.""" result = await hass.config_entries.flow.async_init( diff --git a/tests/components/iometer/__init__.py b/tests/components/iometer/__init__.py index 5c08438925e..19fe2124f1f 100644 --- a/tests/components/iometer/__init__.py +++ b/tests/components/iometer/__init__.py @@ -1 +1,19 @@ """Tests for the IOmeter integration.""" + +from unittest.mock import patch + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_platform( + hass: HomeAssistant, config_entry: MockConfigEntry, platforms: list[Platform] +) -> MockConfigEntry: + """Fixture for setting up the IOmeter platform.""" + config_entry.add_to_hass(hass) + + with patch("homeassistant.components.iometer.PLATFORMS", platforms): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/iometer/conftest.py b/tests/components/iometer/conftest.py index ee45021952e..f8139c7c64c 100644 --- a/tests/components/iometer/conftest.py +++ b/tests/components/iometer/conftest.py @@ -54,4 +54,5 @@ def mock_config_entry() -> MockConfigEntry: title="IOmeter-1ISK0000000000", data={CONF_HOST: "10.0.0.2"}, unique_id="658c2b34-2017-45f2-a12b-731235f8bb97", + entry_id="01JQ6G5395176MAAWKAAPEZHV6", ) diff --git a/tests/components/iometer/snapshots/test_binary_sensor.ambr b/tests/components/iometer/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..38aab735a14 --- /dev/null +++ b/tests/components/iometer/snapshots/test_binary_sensor.ambr @@ -0,0 +1,97 @@ +# serializer version: 1 +# name: test_binary_sensors[binary_sensor.iometer_1isk0000000000_core_attachment_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.iometer_1isk0000000000_core_attachment_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': , + 'original_icon': None, + 'original_name': 'Core attachment status', + 'platform': 'iometer', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'attachment_status', + 'unique_id': '01JQ6G5395176MAAWKAAPEZHV6_attachment_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.iometer_1isk0000000000_core_attachment_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'IOmeter-1ISK0000000000 Core attachment status', + }), + 'context': , + 'entity_id': 'binary_sensor.iometer_1isk0000000000_core_attachment_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[binary_sensor.iometer_1isk0000000000_core_bridge_connection_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.iometer_1isk0000000000_core_bridge_connection_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Core/Bridge connection status', + 'platform': 'iometer', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'connection_status', + 'unique_id': '01JQ6G5395176MAAWKAAPEZHV6_connection_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.iometer_1isk0000000000_core_bridge_connection_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'IOmeter-1ISK0000000000 Core/Bridge connection status', + }), + 'context': , + 'entity_id': 'binary_sensor.iometer_1isk0000000000_core_bridge_connection_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/iometer/test_binary_sensor.py b/tests/components/iometer/test_binary_sensor.py new file mode 100644 index 00000000000..e007084567e --- /dev/null +++ b/tests/components/iometer/test_binary_sensor.py @@ -0,0 +1,135 @@ +"""Test the IOmeter binary sensors.""" + +from datetime import timedelta +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNKNOWN, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_platform + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_binary_sensors( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_iometer_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test binary sensors.""" + await setup_platform(hass, mock_config_entry, [Platform.BINARY_SENSOR]) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_connection_status_sensors( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_iometer_client: AsyncMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test connection status sensor.""" + await setup_platform(hass, mock_config_entry, [Platform.BINARY_SENSOR]) + + assert ( + hass.states.get( + "binary_sensor.iometer_1isk0000000000_core_bridge_connection_status" + ).state + == STATE_ON + ) + + freezer.tick(delta=timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_iometer_client.get_current_status.return_value.device.core.connection_status = "disconnected" + + freezer.tick(delta=timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert ( + hass.states.get( + "binary_sensor.iometer_1isk0000000000_core_bridge_connection_status" + ).state + == STATE_OFF + ) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_attachment_status_sensors( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_iometer_client: AsyncMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test attachment status sensor.""" + await setup_platform(hass, mock_config_entry, [Platform.BINARY_SENSOR]) + + assert ( + hass.states.get( + "binary_sensor.iometer_1isk0000000000_core_attachment_status" + ).state + == STATE_ON + ) + + freezer.tick(delta=timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_iometer_client.get_current_status.return_value.device.core.attachment_status = "detached" + + freezer.tick(delta=timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert ( + hass.states.get( + "binary_sensor.iometer_1isk0000000000_core_attachment_status" + ).state + == STATE_OFF + ) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_attachment_status_sensors_unknown( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_iometer_client: AsyncMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test attachment status sensor reporting unknown.""" + await setup_platform(hass, mock_config_entry, [Platform.BINARY_SENSOR]) + + assert ( + hass.states.get( + "binary_sensor.iometer_1isk0000000000_core_attachment_status" + ).state + == STATE_ON + ) + + freezer.tick(delta=timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_iometer_client.get_current_status.return_value.device.core.attachment_status = None + + freezer.tick(delta=timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert ( + hass.states.get( + "binary_sensor.iometer_1isk0000000000_core_attachment_status" + ).state + == STATE_UNKNOWN + ) diff --git a/tests/components/iometer/test_init.py b/tests/components/iometer/test_init.py new file mode 100644 index 
00000000000..9d8eadc5079 --- /dev/null +++ b/tests/components/iometer/test_init.py @@ -0,0 +1,43 @@ +"""Tests for the IOmeter integration.""" + +from datetime import timedelta +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory + +from homeassistant.components.iometer.const import DOMAIN +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from . import setup_platform + +from tests.common import MockConfigEntry, async_fire_time_changed + + +async def test_new_firmware_version( + hass: HomeAssistant, + mock_iometer_client: AsyncMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test that a new firmware version is reflected in the device registry.""" + await setup_platform(hass, mock_config_entry, [Platform.SENSOR]) + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, mock_config_entry.unique_id)} + ) + assert device_entry is not None + assert device_entry.sw_version == "build-58/build-65" + mock_iometer_client.get_current_status.return_value.device.core.version = "build-62" + mock_iometer_client.get_current_status.return_value.device.bridge.version = ( + "build-69" + ) + freezer.tick(timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, mock_config_entry.unique_id)} + ) + assert device_entry is not None + assert device_entry.sw_version == "build-62/build-69" diff --git a/tests/components/jellyfin/fixtures/get-media-folders.json b/tests/components/jellyfin/fixtures/get-media-folders.json index ff87751a9da..f6b5c1e8d78 100644 --- a/tests/components/jellyfin/fixtures/get-media-folders.json +++ b/tests/components/jellyfin/fixtures/get-media-folders.json @@ -302,8 +302,6 @@ "Album": "string", "CollectionType": "tvshows", "DisplayOrder": "string", - "AlbumId": "21af9851-8e39-43a9-9c47-513d3b9e99fc", - "AlbumPrimaryImageTag": "string", "SeriesPrimaryImageTag": "string", "AlbumArtist": "string", "AlbumArtists": [ diff --git a/tests/components/jellyfin/fixtures/sessions.json b/tests/components/jellyfin/fixtures/sessions.json index 00a1f5265db..db2b691dff0 100644 --- a/tests/components/jellyfin/fixtures/sessions.json +++ b/tests/components/jellyfin/fixtures/sessions.json @@ -4346,6 +4346,7 @@ ], "Album": "ALBUM", "AlbumId": "ALBUM-UUID", + "AlbumPrimaryImageTag": "ALBUM-PRIMARY-IMAGE-TAG", "AlbumArtist": "Album Artist", "AlbumArtists": [ { "Name": "Album Artist", "Id": "9a65b2c222ddb34e51f5cae360fad3a1" } diff --git a/tests/components/jellyfin/fixtures/user-items-parent-id.json b/tests/components/jellyfin/fixtures/user-items-parent-id.json index 2e06c30894c..cd0232894bc 100644 --- a/tests/components/jellyfin/fixtures/user-items-parent-id.json +++ b/tests/components/jellyfin/fixtures/user-items-parent-id.json @@ -302,8 +302,6 @@ "Album": "string", "CollectionType": "string", "DisplayOrder": "string", - "AlbumId": "21af9851-8e39-43a9-9c47-513d3b9e99fc", - "AlbumPrimaryImageTag": "string", "SeriesPrimaryImageTag": "string", "AlbumArtist": "string", "AlbumArtists": [ diff --git a/tests/components/jellyfin/snapshots/test_diagnostics.ambr b/tests/components/jellyfin/snapshots/test_diagnostics.ambr index c992628f034..9d73ee6397c 100644 --- a/tests/components/jellyfin/snapshots/test_diagnostics.ambr +++ 
b/tests/components/jellyfin/snapshots/test_diagnostics.ambr @@ -1707,6 +1707,7 @@ }), ]), 'AlbumId': 'ALBUM-UUID', + 'AlbumPrimaryImageTag': 'ALBUM-PRIMARY-IMAGE-TAG', 'ArtistItems': list([ dict({ 'Id': '1d864900526d9a9513b489f1cc28f8ca', diff --git a/tests/components/jellyfin/test_media_player.py b/tests/components/jellyfin/test_media_player.py index 3263639a32f..c6f015e9bb4 100644 --- a/tests/components/jellyfin/test_media_player.py +++ b/tests/components/jellyfin/test_media_player.py @@ -27,6 +27,7 @@ from homeassistant.components.media_player import ( from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, + ATTR_ENTITY_PICTURE, ATTR_FRIENDLY_NAME, ATTR_ICON, ) @@ -124,6 +125,10 @@ async def test_media_player_music( assert state.attributes.get(ATTR_MEDIA_SERIES_TITLE) is None assert state.attributes.get(ATTR_MEDIA_SEASON) is None assert state.attributes.get(ATTR_MEDIA_EPISODE) is None + assert ( + state.attributes.get(ATTR_ENTITY_PICTURE) + == "http://localhost/Items/ALBUM-UUID/Images/Primary.jpg" + ) entry = entity_registry.async_get(state.entity_id) assert entry diff --git a/tests/components/jewish_calendar/__init__.py b/tests/components/jewish_calendar/__init__.py index ba0a2b4835e..dc66c1e0d7d 100644 --- a/tests/components/jewish_calendar/__init__.py +++ b/tests/components/jewish_calendar/__init__.py @@ -3,8 +3,6 @@ from collections import namedtuple from datetime import datetime -from freezegun import freeze_time as alter_time # noqa: F401 - from homeassistant.components import jewish_calendar from homeassistant.util import dt as dt_util @@ -49,7 +47,7 @@ def make_jerusalem_test_params(dtime, results, havdalah_offset=0): } return ( dtime, - jewish_calendar.DEFAULT_CANDLE_LIGHT, + 40, havdalah_offset, False, "Asia/Jerusalem", diff --git a/tests/components/jewish_calendar/test_binary_sensor.py b/tests/components/jewish_calendar/test_binary_sensor.py index 5cfaaedfc72..194e6fe9d01 100644 --- a/tests/components/jewish_calendar/test_binary_sensor.py +++ b/tests/components/jewish_calendar/test_binary_sensor.py @@ -3,6 +3,7 @@ from datetime import datetime as dt, timedelta import logging +from freezegun import freeze_time import pytest from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN @@ -18,7 +19,7 @@ from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util -from . import alter_time, make_jerusalem_test_params, make_nyc_test_params +from . 
import make_jerusalem_test_params, make_nyc_test_params from tests.common import MockConfigEntry, async_fire_time_changed @@ -191,7 +192,7 @@ async def test_issur_melacha_sensor( hass.config.latitude = latitude hass.config.longitude = longitude - with alter_time(test_time): + with freeze_time(test_time): entry = MockConfigEntry( title=DEFAULT_NAME, domain=DOMAIN, @@ -213,7 +214,7 @@ async def test_issur_melacha_sensor( == result["state"] ) - with alter_time(result["update"]): + with freeze_time(result["update"]): async_fire_time_changed(hass, result["update"]) await hass.async_block_till_done() assert ( @@ -264,7 +265,7 @@ async def test_issur_melacha_sensor_update( hass.config.latitude = latitude hass.config.longitude = longitude - with alter_time(test_time): + with freeze_time(test_time): entry = MockConfigEntry( title=DEFAULT_NAME, domain=DOMAIN, @@ -286,7 +287,7 @@ async def test_issur_melacha_sensor_update( ) test_time += timedelta(microseconds=1) - with alter_time(test_time): + with freeze_time(test_time): async_fire_time_changed(hass, test_time) await hass.async_block_till_done() assert ( diff --git a/tests/components/jewish_calendar/test_init.py b/tests/components/jewish_calendar/test_init.py index cb982afec0f..6a4f57513fa 100644 --- a/tests/components/jewish_calendar/test_init.py +++ b/tests/components/jewish_calendar/test_init.py @@ -1 +1,44 @@ """Tests for the Jewish Calendar component's init.""" + +import pytest + +from homeassistant.components.jewish_calendar.const import DOMAIN +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry + + +@pytest.mark.parametrize( + ("old_key", "new_key"), + [ + ("first_light", "alot_hashachar"), + ("sunset", "shkia"), + ("havdalah", "havdalah"), # Test no change + ], +) +async def test_migrate_unique_id( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + old_key: str, + new_key: str, +) -> None: + """Test unique id migration.""" + entry = MockConfigEntry(domain=DOMAIN, data={}) + entry.add_to_hass(hass) + + entity: er.RegistryEntry = entity_registry.async_get_or_create( + domain=SENSOR_DOMAIN, + platform=DOMAIN, + unique_id=f"{entry.entry_id}-{old_key}", + config_entry=entry, + ) + assert entity.unique_id.endswith(f"-{old_key}") + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + entity_migrated = entity_registry.async_get(entity.entity_id) + assert entity_migrated + assert entity_migrated.unique_id == f"{entry.entry_id}-{new_key}" diff --git a/tests/components/jewish_calendar/test_sensor.py b/tests/components/jewish_calendar/test_sensor.py index aac0f583b05..bc9e69a9717 100644 --- a/tests/components/jewish_calendar/test_sensor.py +++ b/tests/components/jewish_calendar/test_sensor.py @@ -2,10 +2,11 @@ from datetime import datetime as dt, timedelta -from hdate import htables +from freezegun import freeze_time +from hdate.holidays import HolidayDatabase +from hdate.parasha import Parasha import pytest -from homeassistant.components.binary_sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.components.jewish_calendar.const import ( CONF_CANDLE_LIGHT_MINUTES, CONF_DIASPORA, @@ -13,12 +14,13 @@ from homeassistant.components.jewish_calendar.const import ( DEFAULT_NAME, DOMAIN, ) +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.const import CONF_LANGUAGE, CONF_PLATFORM from 
homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util -from . import alter_time, make_jerusalem_test_params, make_nyc_test_params +from . import make_jerusalem_test_params, make_nyc_test_params from tests.common import MockConfigEntry, async_fire_time_changed @@ -92,8 +94,7 @@ TEST_PARAMS = [ "icon": "mdi:calendar-star", "id": "rosh_hashana_i", "type": "YOM_TOV", - "type_id": 1, - "options": htables.get_all_holidays("english"), + "options": HolidayDatabase(False).get_all_names("english"), }, ), ( @@ -111,8 +112,7 @@ TEST_PARAMS = [ "icon": "mdi:calendar-star", "id": "chanukah, rosh_chodesh", "type": "MELACHA_PERMITTED_HOLIDAY, ROSH_CHODESH", - "type_id": "4, 10", - "options": htables.get_all_holidays("english"), + "options": HolidayDatabase(False).get_all_names("english"), }, ), ( @@ -128,7 +128,7 @@ TEST_PARAMS = [ "device_class": "enum", "friendly_name": "Jewish Calendar Parshat Hashavua", "icon": "mdi:book-open-variant", - "options": [p.hebrew for p in htables.PARASHAOT], + "options": list(Parasha), }, ), ( @@ -139,7 +139,7 @@ TEST_PARAMS = [ "hebrew", "t_set_hakochavim", True, - dt(2018, 9, 8, 19, 45), + dt(2018, 9, 8, 19, 47), None, ), ( @@ -150,7 +150,7 @@ TEST_PARAMS = [ "hebrew", "t_set_hakochavim", False, - dt(2018, 9, 8, 19, 19), + dt(2018, 9, 8, 19, 21), None, ), ( @@ -185,9 +185,9 @@ TEST_PARAMS = [ False, "ו' מרחשוון ה' תשע\"ט", { - "hebrew_year": 5779, + "hebrew_year": "5779", "hebrew_month_name": "מרחשוון", - "hebrew_day": 6, + "hebrew_day": "6", "icon": "mdi:star-david", "friendly_name": "Jewish Calendar Date", }, @@ -245,7 +245,7 @@ async def test_jewish_calendar_sensor( hass.config.latitude = latitude hass.config.longitude = longitude - with alter_time(test_time): + with freeze_time(test_time): entry = MockConfigEntry( title=DEFAULT_NAME, domain=DOMAIN, @@ -258,7 +258,7 @@ async def test_jewish_calendar_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - future = dt_util.utcnow() + timedelta(seconds=30) + future = test_time + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() @@ -424,9 +424,9 @@ SHABBAT_PARAMS = [ make_jerusalem_test_params( dt(2018, 9, 29, 21, 25), { - "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 7), + "english_upcoming_candle_lighting": dt(2018, 9, 30, 17, 45), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 1), - "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 1), + "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 17, 39), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 54), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", @@ -437,22 +437,22 @@ SHABBAT_PARAMS = [ make_jerusalem_test_params( dt(2018, 9, 30, 21, 25), { - "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 7), + "english_upcoming_candle_lighting": dt(2018, 9, 30, 17, 45), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 1), - "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 1), + "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 17, 39), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 54), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", - "english_holiday": "Shmini Atzeret", - "hebrew_holiday": "שמיני עצרת", + "english_holiday": "Shmini Atzeret, Simchat Torah", + "hebrew_holiday": "שמיני עצרת, שמחת תורה", }, ), make_jerusalem_test_params( dt(2018, 10, 1, 21, 25), { - 
"english_upcoming_candle_lighting": dt(2018, 10, 5, 18, 1), + "english_upcoming_candle_lighting": dt(2018, 10, 5, 17, 39), "english_upcoming_havdalah": dt(2018, 10, 6, 18, 54), - "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 1), + "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 17, 39), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 54), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", @@ -487,9 +487,9 @@ SHABBAT_PARAMS = [ make_jerusalem_test_params( dt(2017, 9, 21, 8, 25), { - "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 20), + "english_upcoming_candle_lighting": dt(2017, 9, 20, 17, 58), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 11), - "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 12), + "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 17, 56), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 11), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", @@ -500,9 +500,9 @@ SHABBAT_PARAMS = [ make_jerusalem_test_params( dt(2017, 9, 22, 8, 25), { - "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 20), + "english_upcoming_candle_lighting": dt(2017, 9, 20, 17, 58), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 11), - "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 12), + "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 17, 56), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 11), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", @@ -513,9 +513,9 @@ SHABBAT_PARAMS = [ make_jerusalem_test_params( dt(2017, 9, 23, 8, 25), { - "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 20), + "english_upcoming_candle_lighting": dt(2017, 9, 20, 17, 58), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 11), - "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 12), + "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 17, 56), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 11), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", @@ -587,7 +587,7 @@ async def test_shabbat_times_sensor( hass.config.latitude = latitude hass.config.longitude = longitude - with alter_time(test_time): + with freeze_time(test_time): entry = MockConfigEntry( title=DEFAULT_NAME, domain=DOMAIN, @@ -604,7 +604,7 @@ async def test_shabbat_times_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - future = dt_util.utcnow() + timedelta(seconds=30) + future = test_time + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() @@ -649,13 +649,13 @@ async def test_omer_sensor(hass: HomeAssistant, test_time, result) -> None: """Test Omer Count sensor output.""" test_time = test_time.replace(tzinfo=dt_util.get_time_zone(hass.config.time_zone)) - with alter_time(test_time): + with freeze_time(test_time): entry = MockConfigEntry(title=DEFAULT_NAME, domain=DOMAIN) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - future = dt_util.utcnow() + timedelta(seconds=30) + future = test_time + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() @@ -684,13 +684,13 @@ async def test_dafyomi_sensor(hass: HomeAssistant, test_time, result) -> None: """Test Daf Yomi sensor output.""" test_time = test_time.replace(tzinfo=dt_util.get_time_zone(hass.config.time_zone)) - with alter_time(test_time): + 
with freeze_time(test_time): entry = MockConfigEntry(title=DEFAULT_NAME, domain=DOMAIN) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - future = dt_util.utcnow() + timedelta(seconds=30) + future = test_time + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() diff --git a/tests/components/jewish_calendar/test_service.py b/tests/components/jewish_calendar/test_service.py new file mode 100644 index 00000000000..9eb80e5e7f0 --- /dev/null +++ b/tests/components/jewish_calendar/test_service.py @@ -0,0 +1,55 @@ +"""Test jewish calendar service.""" + +import datetime as dt + +from hdate.translator import Language +import pytest + +from homeassistant.components.jewish_calendar.const import DOMAIN +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +@pytest.mark.parametrize( + ("test_date", "nusach", "language", "expected"), + [ + pytest.param(dt.date(2025, 3, 20), "sfarad", "he", "", id="no_blessing"), + pytest.param( + dt.date(2025, 5, 20), + "ashkenaz", + "he", + "היום שבעה ושלושים יום שהם חמישה שבועות ושני ימים בעומר", + id="ahskenaz-hebrew", + ), + pytest.param( + dt.date(2025, 5, 20), + "sfarad", + "en", + "Today is the thirty-seventh day, which are five weeks and two days of the Omer", + id="sefarad-english", + ), + ], +) +async def test_get_omer_blessing( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + test_date: dt.date, + nusach: str, + language: Language, + expected: str, +) -> None: + """Test get omer blessing.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + result = await hass.services.async_call( + DOMAIN, + "count_omer", + {"date": test_date, "nusach": nusach, "language": language}, + blocking=True, + return_response=True, + ) + + assert result["message"] == expected diff --git a/tests/components/kitchen_sink/snapshots/test_init.ambr b/tests/components/kitchen_sink/snapshots/test_init.ambr index b91131eb2b0..fe22f19fb7a 100644 --- a/tests/components/kitchen_sink/snapshots/test_init.ambr +++ b/tests/components/kitchen_sink/snapshots/test_init.ambr @@ -48,5 +48,15 @@ 'type': 'no_state', }), ]), + 'sensor.statistics_issues_issue_5': list([ + dict({ + 'data': dict({ + 'metadata_mean_type': 1, + 'state_mean_type': 2, + 'statistic_id': 'sensor.statistics_issues_issue_5', + }), + 'type': 'mean_type_changed', + }), + ]), }) # --- diff --git a/tests/components/kitchen_sink/snapshots/test_sensor.ambr b/tests/components/kitchen_sink/snapshots/test_sensor.ambr index 7b433c40170..6cd9aa2e855 100644 --- a/tests/components/kitchen_sink/snapshots/test_sensor.ambr +++ b/tests/components/kitchen_sink/snapshots/test_sensor.ambr @@ -29,6 +29,20 @@ 'last_updated': , 'state': '1500', }), + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'wind_direction', + 'friendly_name': 'Statistics issues Issue 5', + 'state_class': , + 'unit_of_measurement': '°', + }), + 'context': , + 'entity_id': 'sensor.statistics_issues_issue_5', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }), StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Statistics issues Issue 1', @@ -99,6 +113,20 @@ 'last_updated': , 'state': '1500', }), + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'wind_direction', + 'friendly_name': 'Statistics issues Issue 5', + 'state_class': , + 'unit_of_measurement': 
'°', + }), + 'context': , + 'entity_id': 'sensor.statistics_issues_issue_5', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }), StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Sensor test', diff --git a/tests/components/kitchen_sink/test_config_flow.py b/tests/components/kitchen_sink/test_config_flow.py index 1eea1c8036b..88bacc2cb0b 100644 --- a/tests/components/kitchen_sink/test_config_flow.py +++ b/tests/components/kitchen_sink/test_config_flow.py @@ -96,6 +96,15 @@ async def test_options_flow(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM assert result["step_id"] == "options_1" + section_marker, section_schema = list(result["data_schema"].schema.items())[0] + assert section_marker == "section_1" + section_schema_markers = list(section_schema.schema.schema) + assert len(section_schema_markers) == 2 + assert section_schema_markers[0] == "bool" + assert section_schema_markers[0].description is None + assert section_schema_markers[1] == "int" + assert section_schema_markers[1].description == {"suggested_value": 10} + result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={"section_1": {"bool": True, "int": 15}}, diff --git a/tests/components/kitchen_sink/test_init.py b/tests/components/kitchen_sink/test_init.py index 50518f89107..526801aecfa 100644 --- a/tests/components/kitchen_sink/test_init.py +++ b/tests/components/kitchen_sink/test_init.py @@ -11,6 +11,7 @@ import voluptuous as vol from homeassistant.components.kitchen_sink import DOMAIN from homeassistant.components.recorder import get_instance from homeassistant.components.recorder.statistics import ( + StatisticMeanType, async_add_external_statistics, get_last_statistics, list_statistic_ids, @@ -45,6 +46,7 @@ async def test_demo_statistics(hass: HomeAssistant) -> None: assert { "display_unit_of_measurement": "°C", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": "Outdoor temperature", "source": DOMAIN, @@ -55,6 +57,7 @@ async def test_demo_statistics(hass: HomeAssistant) -> None: assert { "display_unit_of_measurement": "kWh", "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": "Energy consumption 1", "source": DOMAIN, diff --git a/tests/components/lamarzocco/fixtures/config.json b/tests/components/lamarzocco/fixtures/config.json index ea6e2ee76b8..5aac86dde97 100644 --- a/tests/components/lamarzocco/fixtures/config.json +++ b/tests/components/lamarzocco/fixtures/config.json @@ -101,28 +101,60 @@ "mode": "TypeB", "Group1": [ { + "mode": "TypeA", "groupNumber": "Group1", "doseType": "DoseA", "preWetTime": 0.5, "preWetHoldTime": 1 }, { + "mode": "TypeB", + "groupNumber": "Group1", + "doseType": "DoseA", + "preWetTime": 0, + "preWetHoldTime": 4 + }, + { + "mode": "TypeA", "groupNumber": "Group1", "doseType": "DoseB", "preWetTime": 0.5, "preWetHoldTime": 1 }, { + "mode": "TypeB", "groupNumber": "Group1", - "doseType": "DoseC", - "preWetTime": 3.2999999523162842, - "preWetHoldTime": 3.2999999523162842 + "doseType": "DoseB", + "preWetTime": 0, + "preWetHoldTime": 4 }, { + "mode": "TypeA", + "groupNumber": "Group1", + "doseType": "DoseC", + "preWetTime": 3.3, + "preWetHoldTime": 3.3 + }, + { + "mode": "TypeB", + "groupNumber": "Group1", + "doseType": "DoseC", + "preWetTime": 0, + "preWetHoldTime": 4 + }, + { + "mode": "TypeA", "groupNumber": "Group1", "doseType": "DoseD", "preWetTime": 2, "preWetHoldTime": 2 + }, + { + "mode": "TypeB", + "groupNumber": 
"Group1", + "doseType": "DoseD", + "preWetTime": 0, + "preWetHoldTime": 4 } ] }, diff --git a/tests/components/lamarzocco/fixtures/config_mini.json b/tests/components/lamarzocco/fixtures/config_mini.json index 22533a94872..a726d715a6f 100644 --- a/tests/components/lamarzocco/fixtures/config_mini.json +++ b/tests/components/lamarzocco/fixtures/config_mini.json @@ -82,10 +82,18 @@ "mode": "TypeB", "Group1": [ { + "mode": "TypeA", "groupNumber": "Group1", - "doseType": "DoseA", + "doseType": "Continuous", "preWetTime": 2, "preWetHoldTime": 3 + }, + { + "mode": "TypeB", + "groupNumber": "Group1", + "doseType": "Continuous", + "preWetTime": 0, + "preWetHoldTime": 3 } ] }, diff --git a/tests/components/lamarzocco/snapshots/test_diagnostics.ambr b/tests/components/lamarzocco/snapshots/test_diagnostics.ambr index b1d8140b2ce..018449f7c9a 100644 --- a/tests/components/lamarzocco/snapshots/test_diagnostics.ambr +++ b/tests/components/lamarzocco/snapshots/test_diagnostics.ambr @@ -27,22 +27,46 @@ }), 'plumbed_in': True, 'prebrew_configuration': dict({ - '1': dict({ - 'off_time': 1, - 'on_time': 0.5, - }), - '2': dict({ - 'off_time': 1, - 'on_time': 0.5, - }), - '3': dict({ - 'off_time': 3.299999952316284, - 'on_time': 3.299999952316284, - }), - '4': dict({ - 'off_time': 2, - 'on_time': 2, - }), + '1': list([ + dict({ + 'off_time': 1, + 'on_time': 0.5, + }), + dict({ + 'off_time': 4, + 'on_time': 0, + }), + ]), + '2': list([ + dict({ + 'off_time': 1, + 'on_time': 0.5, + }), + dict({ + 'off_time': 4, + 'on_time': 0, + }), + ]), + '3': list([ + dict({ + 'off_time': 3.3, + 'on_time': 3.3, + }), + dict({ + 'off_time': 4, + 'on_time': 0, + }), + ]), + '4': list([ + dict({ + 'off_time': 2, + 'on_time': 2, + }), + dict({ + 'off_time': 4, + 'on_time': 0, + }), + ]), }), 'prebrew_mode': 'TypeB', 'scale': None, diff --git a/tests/components/lamarzocco/snapshots/test_number.ambr b/tests/components/lamarzocco/snapshots/test_number.ambr index 0748c9384a9..de1f11b14eb 100644 --- a/tests/components/lamarzocco/snapshots/test_number.ambr +++ b/tests/components/lamarzocco/snapshots/test_number.ambr @@ -419,7 +419,7 @@ 'state': '121', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS012345_prebrew_off_time_key_1-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-TypeA-set_prebrew_time-kwargs0-GS3 AV][GS012345_prebrew_off_time_key_1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', @@ -438,7 +438,7 @@ 'state': '1', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS012345_prebrew_off_time_key_2-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-TypeA-set_prebrew_time-kwargs0-GS3 AV][GS012345_prebrew_off_time_key_2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', @@ -457,7 +457,7 @@ 'state': '1', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS012345_prebrew_off_time_key_3-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-TypeA-set_prebrew_time-kwargs0-GS3 AV][GS012345_prebrew_off_time_key_3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', @@ -473,10 +473,10 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '3.29999995231628', + 'state': '3.3', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 
AV][GS012345_prebrew_off_time_key_4-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-TypeA-set_prebrew_time-kwargs0-GS3 AV][GS012345_prebrew_off_time_key_4-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', @@ -495,7 +495,7 @@ 'state': '2', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS012345_prebrew_on_time_key_1-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-TypeA-set_prebrew_time-kwargs1-GS3 AV][GS012345_prebrew_on_time_key_1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', @@ -514,7 +514,7 @@ 'state': '1', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS012345_prebrew_on_time_key_2-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-TypeA-set_prebrew_time-kwargs1-GS3 AV][GS012345_prebrew_on_time_key_2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', @@ -533,7 +533,7 @@ 'state': '1', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS012345_prebrew_on_time_key_3-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-TypeA-set_prebrew_time-kwargs1-GS3 AV][GS012345_prebrew_on_time_key_3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', @@ -549,10 +549,10 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '3.29999995231628', + 'state': '3.3', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS012345_prebrew_on_time_key_4-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-TypeA-set_prebrew_time-kwargs1-GS3 AV][GS012345_prebrew_on_time_key_4-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', @@ -587,7 +587,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '1', + 'state': '4', }) # --- # name: test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS012345_preinfusion_time_key_2-state] @@ -606,7 +606,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '1', + 'state': '4', }) # --- # name: test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS012345_preinfusion_time_key_3-state] @@ -625,7 +625,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '3.29999995231628', + 'state': '4', }) # --- # name: test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS012345_preinfusion_time_key_4-state] @@ -644,10 +644,10 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2', + 'state': '4', }) # --- -# name: test_pre_brew_infusion_numbers[prebrew_off_time-set_prebrew_time-Enabled-6-kwargs0-Linea Mini] +# name: test_pre_brew_infusion_numbers[prebrew_off_time-set_prebrew_time-TypeA-6-kwargs0-Linea Mini] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', @@ -666,7 +666,7 @@ 'state': '3', }) # --- -# name: test_pre_brew_infusion_numbers[prebrew_off_time-set_prebrew_time-Enabled-6-kwargs0-Linea Mini].1 +# name: test_pre_brew_infusion_numbers[prebrew_off_time-set_prebrew_time-TypeA-6-kwargs0-Linea Mini].1 EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -705,7 +705,7 @@ 'unit_of_measurement': , }) # --- -# name: 
test_pre_brew_infusion_numbers[prebrew_off_time-set_prebrew_time-Enabled-6-kwargs0-Micra] +# name: test_pre_brew_infusion_numbers[prebrew_off_time-set_prebrew_time-TypeA-6-kwargs0-Micra] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', @@ -724,7 +724,7 @@ 'state': '1', }) # --- -# name: test_pre_brew_infusion_numbers[prebrew_off_time-set_prebrew_time-Enabled-6-kwargs0-Micra].1 +# name: test_pre_brew_infusion_numbers[prebrew_off_time-set_prebrew_time-TypeA-6-kwargs0-Micra].1 EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -763,7 +763,7 @@ 'unit_of_measurement': , }) # --- -# name: test_pre_brew_infusion_numbers[prebrew_on_time-set_prebrew_time-Enabled-6-kwargs1-Linea Mini] +# name: test_pre_brew_infusion_numbers[prebrew_on_time-set_prebrew_time-TypeA-6-kwargs1-Linea Mini] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', @@ -782,7 +782,7 @@ 'state': '3', }) # --- -# name: test_pre_brew_infusion_numbers[prebrew_on_time-set_prebrew_time-Enabled-6-kwargs1-Linea Mini].1 +# name: test_pre_brew_infusion_numbers[prebrew_on_time-set_prebrew_time-TypeA-6-kwargs1-Linea Mini].1 EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -821,7 +821,7 @@ 'unit_of_measurement': , }) # --- -# name: test_pre_brew_infusion_numbers[prebrew_on_time-set_prebrew_time-Enabled-6-kwargs1-Micra] +# name: test_pre_brew_infusion_numbers[prebrew_on_time-set_prebrew_time-TypeA-6-kwargs1-Micra] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', @@ -840,7 +840,7 @@ 'state': '1', }) # --- -# name: test_pre_brew_infusion_numbers[prebrew_on_time-set_prebrew_time-Enabled-6-kwargs1-Micra].1 +# name: test_pre_brew_infusion_numbers[prebrew_on_time-set_prebrew_time-TypeA-6-kwargs1-Micra].1 EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -953,7 +953,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '1', + 'state': '4', }) # --- # name: test_pre_brew_infusion_numbers[preinfusion_time-set_preinfusion_time-TypeB-7-kwargs2-Micra].1 diff --git a/tests/components/lamarzocco/test_init.py b/tests/components/lamarzocco/test_init.py index 09ebc462952..a9a3b9f23e1 100644 --- a/tests/components/lamarzocco/test_init.py +++ b/tests/components/lamarzocco/test_init.py @@ -170,12 +170,18 @@ async def test_bluetooth_is_set_from_discovery( "homeassistant.components.lamarzocco.async_discovered_service_info", return_value=[service_info], ) as discovery, - patch("homeassistant.components.lamarzocco.LaMarzoccoMachine") as init_device, + patch( + "homeassistant.components.lamarzocco.LaMarzoccoMachine" + ) as mock_machine_class, ): + mock_machine = MagicMock() + mock_machine.get_firmware = AsyncMock() + mock_machine.firmware = mock_lamarzocco.firmware + mock_machine_class.return_value = mock_machine await async_init_integration(hass, mock_config_entry) discovery.assert_called_once() - init_device.assert_called_once() - _, kwargs = init_device.call_args + assert mock_machine_class.call_count == 2 + _, kwargs = mock_machine_class.call_args assert kwargs["bluetooth_client"] is not None assert mock_config_entry.data[CONF_NAME] == service_info.name assert mock_config_entry.data[CONF_MAC] == service_info.address @@ -223,6 +229,19 @@ async def test_gateway_version_issue( assert (issue is not None) == issue_exists +async def test_conf_host_removed_for_new_gateway( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_lamarzocco: MagicMock, +) -> None: + """Make sure we get the issue for certain gateway firmware versions.""" + 
mock_lamarzocco.firmware[FirmwareType.GATEWAY].current_version = "v5.0.9" + + await async_init_integration(hass, mock_config_entry) + + assert CONF_HOST not in mock_config_entry.data + + async def test_device( hass: HomeAssistant, mock_lamarzocco: MagicMock, diff --git a/tests/components/lektrico/snapshots/test_binary_sensor.ambr b/tests/components/lektrico/snapshots/test_binary_sensor.ambr index b365ff84187..7d812c0fc67 100644 --- a/tests/components/lektrico/snapshots/test_binary_sensor.ambr +++ b/tests/components/lektrico/snapshots/test_binary_sensor.ambr @@ -24,7 +24,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Ev diode short', + 'original_name': 'EV diode short', 'platform': 'lektrico', 'previous_unique_id': None, 'supported_features': 0, @@ -37,7 +37,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'problem', - 'friendly_name': '1p7k_500006 Ev diode short', + 'friendly_name': '1p7k_500006 EV diode short', }), 'context': , 'entity_id': 'binary_sensor.1p7k_500006_ev_diode_short', @@ -72,7 +72,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Ev error', + 'original_name': 'EV error', 'platform': 'lektrico', 'previous_unique_id': None, 'supported_features': 0, @@ -85,7 +85,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'problem', - 'friendly_name': '1p7k_500006 Ev error', + 'friendly_name': '1p7k_500006 EV error', }), 'context': , 'entity_id': 'binary_sensor.1p7k_500006_ev_error', @@ -312,7 +312,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Rcd error', + 'original_name': 'RCD error', 'platform': 'lektrico', 'previous_unique_id': None, 'supported_features': 0, @@ -325,7 +325,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'problem', - 'friendly_name': '1p7k_500006 Rcd error', + 'friendly_name': '1p7k_500006 RCD error', }), 'context': , 'entity_id': 'binary_sensor.1p7k_500006_rcd_error', diff --git a/tests/components/lektrico/snapshots/test_number.ambr b/tests/components/lektrico/snapshots/test_number.ambr index 57cf40567e7..368479cdd06 100644 --- a/tests/components/lektrico/snapshots/test_number.ambr +++ b/tests/components/lektrico/snapshots/test_number.ambr @@ -86,7 +86,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'Led brightness', + 'original_name': 'LED brightness', 'platform': 'lektrico', 'previous_unique_id': None, 'supported_features': 0, @@ -98,7 +98,7 @@ # name: test_all_entities[number.1p7k_500006_led_brightness-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': '1p7k_500006 Led brightness', + 'friendly_name': '1p7k_500006 LED brightness', 'max': 100, 'min': 0, 'mode': , diff --git a/tests/components/lg_thinq/test_climate.py b/tests/components/lg_thinq/test_climate.py index 4ac2fa55a21..e53b1c5ff39 100644 --- a/tests/components/lg_thinq/test_climate.py +++ b/tests/components/lg_thinq/test_climate.py @@ -5,9 +5,10 @@ from unittest.mock import AsyncMock, patch import pytest from syrupy import SnapshotAssertion -from homeassistant.const import Platform, UnitOfTemperature +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM from . 
import setup_integration @@ -23,7 +24,7 @@ async def test_all_entities( entity_registry: er.EntityRegistry, ) -> None: """Test all entities.""" - hass.config.units.temperature_unit = UnitOfTemperature.FAHRENHEIT + hass.config.units = US_CUSTOMARY_SYSTEM with patch("homeassistant.components.lg_thinq.PLATFORMS", [Platform.CLIMATE]): await setup_integration(hass, mock_config_entry) diff --git a/tests/components/lutron_caseta/__init__.py b/tests/components/lutron_caseta/__init__.py index b27d30ac31f..5f146cd988a 100644 --- a/tests/components/lutron_caseta/__init__.py +++ b/tests/components/lutron_caseta/__init__.py @@ -1,5 +1,8 @@ """Tests for the Lutron Caseta integration.""" +import asyncio +from collections.abc import Callable +from typing import Any from unittest.mock import patch from homeassistant.components.lutron_caseta import DOMAIN @@ -84,25 +87,12 @@ _LEAP_DEVICE_TYPES = { } -async def async_setup_integration(hass: HomeAssistant, mock_bridge) -> MockConfigEntry: - """Set up a mock bridge.""" - mock_entry = MockConfigEntry(domain=DOMAIN, data=ENTRY_MOCK_DATA) - mock_entry.add_to_hass(hass) - - with patch( - "homeassistant.components.lutron_caseta.Smartbridge.create_tls" - ) as create_tls: - create_tls.return_value = mock_bridge(can_connect=True) - await hass.config_entries.async_setup(mock_entry.entry_id) - await hass.async_block_till_done() - return mock_entry - - class MockBridge: """Mock Lutron bridge that emulates configured connected status.""" - def __init__(self, can_connect=True) -> None: + def __init__(self, can_connect=True, timeout_on_connect=False) -> None: """Initialize MockBridge instance with configured mock connectivity.""" + self.timeout_on_connect = timeout_on_connect self.can_connect = can_connect self.is_currently_connected = False self.areas = self.load_areas() @@ -113,6 +103,8 @@ class MockBridge: async def connect(self): """Connect the mock bridge.""" + if self.timeout_on_connect: + await asyncio.Event().wait() # wait forever if self.can_connect: self.is_currently_connected = True @@ -320,3 +312,43 @@ class MockBridge: async def close(self): """Close the mock bridge connection.""" self.is_currently_connected = False + + +def make_mock_entry() -> MockConfigEntry: + """Create a mock config entry.""" + return MockConfigEntry(domain=DOMAIN, data=ENTRY_MOCK_DATA) + + +async def async_setup_integration( + hass: HomeAssistant, + mock_bridge: MockBridge, + config_entry_id: str | None = None, + can_connect: bool = True, + timeout_during_connect: bool = False, + timeout_during_configure: bool = False, +) -> MockConfigEntry: + """Set up a mock bridge.""" + if config_entry_id is None: + mock_entry = make_mock_entry() + mock_entry.add_to_hass(hass) + config_entry_id = mock_entry.entry_id + else: + mock_entry = hass.config_entries.async_get_entry(config_entry_id) + + def create_tls_factory( + *args: Any, on_connect_callback: Callable[[], None], **kwargs: Any + ) -> None: + """Return a mock bridge.""" + if not timeout_during_connect: + on_connect_callback() + return mock_bridge( + can_connect=can_connect, timeout_on_connect=timeout_during_configure + ) + + with patch( + "homeassistant.components.lutron_caseta.Smartbridge.create_tls", + create_tls_factory, + ): + await hass.config_entries.async_setup(config_entry_id) + await hass.async_block_till_done() + return mock_entry diff --git a/tests/components/lutron_caseta/test_device_trigger.py b/tests/components/lutron_caseta/test_device_trigger.py index 1ab45bf7582..001bf86ad54 100644 --- 
a/tests/components/lutron_caseta/test_device_trigger.py +++ b/tests/components/lutron_caseta/test_device_trigger.py @@ -1,7 +1,5 @@ """The tests for Lutron Caséta device triggers.""" -from unittest.mock import patch - import pytest from pytest_unordered import unordered @@ -37,7 +35,7 @@ from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component -from . import MockBridge +from . import MockBridge, async_setup_integration from tests.common import MockConfigEntry, async_get_device_automations @@ -112,12 +110,7 @@ async def _async_setup_lutron_with_picos(hass: HomeAssistant) -> str: ) config_entry.add_to_hass(hass) - with patch( - "homeassistant.components.lutron_caseta.Smartbridge.create_tls", - return_value=MockBridge(can_connect=True), - ): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() + await async_setup_integration(hass, MockBridge, config_entry.entry_id) return config_entry.entry_id @@ -487,9 +480,7 @@ async def test_if_fires_on_button_event_late_setup( }, ) - with patch("homeassistant.components.lutron_caseta.Smartbridge.create_tls"): - await hass.config_entries.async_setup(config_entry_id) - await hass.async_block_till_done() + await async_setup_integration(hass, MockBridge, config_entry_id) message = { ATTR_SERIAL: device.get("serial"), diff --git a/tests/components/lutron_caseta/test_diagnostics.py b/tests/components/lutron_caseta/test_diagnostics.py index 5c7d20da208..45229918578 100644 --- a/tests/components/lutron_caseta/test_diagnostics.py +++ b/tests/components/lutron_caseta/test_diagnostics.py @@ -1,6 +1,6 @@ """Test the Lutron Caseta diagnostics.""" -from unittest.mock import ANY, patch +from unittest.mock import ANY from homeassistant.components.lutron_caseta import DOMAIN from homeassistant.components.lutron_caseta.const import ( @@ -11,7 +11,7 @@ from homeassistant.components.lutron_caseta.const import ( from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant -from . import MockBridge +from . import MockBridge, async_setup_integration from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry @@ -34,12 +34,7 @@ async def test_diagnostics( ) config_entry.add_to_hass(hass) - with patch( - "homeassistant.components.lutron_caseta.Smartbridge.create_tls", - return_value=MockBridge(can_connect=True), - ): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() + await async_setup_integration(hass, MockBridge, config_entry.entry_id) diag = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) assert diag == { diff --git a/tests/components/lutron_caseta/test_init.py b/tests/components/lutron_caseta/test_init.py new file mode 100644 index 00000000000..7e509acbf62 --- /dev/null +++ b/tests/components/lutron_caseta/test_init.py @@ -0,0 +1,54 @@ +"""Tests for the Lutron Caseta integration.""" + +from unittest.mock import patch + +import pytest + +from homeassistant.components import lutron_caseta +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . 
import MockBridge, async_setup_integration, make_mock_entry + + +@pytest.mark.parametrize( + ("constant", "message", "timeout_during_connect", "timeout_during_configure"), + [ + ("CONNECT_TIMEOUT", "Timed out on connect", True, False), + ("CONFIGURE_TIMEOUT", "Timed out on configure", False, True), + ], +) +async def test_timeout_during_setup( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + constant: str, + message: str, + timeout_during_connect: bool, + timeout_during_configure: bool, +) -> None: + """Test a timeout during setup.""" + mock_entry = make_mock_entry() + mock_entry.add_to_hass(hass) + with patch.object(lutron_caseta, constant, 0.001): + await async_setup_integration( + hass, + MockBridge, + config_entry_id=mock_entry.entry_id, + timeout_during_connect=timeout_during_connect, + timeout_during_configure=timeout_during_configure, + ) + assert mock_entry.state is ConfigEntryState.SETUP_RETRY + assert f"{message} for 1.1.1.1" in caplog.text + + +async def test_cannot_connect( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test failing to connect.""" + mock_entry = make_mock_entry() + mock_entry.add_to_hass(hass) + await async_setup_integration( + hass, MockBridge, config_entry_id=mock_entry.entry_id, can_connect=False + ) + assert mock_entry.state is ConfigEntryState.SETUP_RETRY + assert "Connection failed to 1.1.1.1" in caplog.text diff --git a/tests/components/lutron_caseta/test_logbook.py b/tests/components/lutron_caseta/test_logbook.py index 9a58838d65c..8b4a3e00fa9 100644 --- a/tests/components/lutron_caseta/test_logbook.py +++ b/tests/components/lutron_caseta/test_logbook.py @@ -1,7 +1,5 @@ """The tests for lutron caseta logbook.""" -from unittest.mock import patch - from homeassistant.components.lutron_caseta.const import ( ATTR_ACTION, ATTR_AREA_NAME, @@ -43,13 +41,7 @@ async def test_humanify_lutron_caseta_button_event(hass: HomeAssistant) -> None: unique_id="abc", ) config_entry.add_to_hass(hass) - - with patch( - "homeassistant.components.lutron_caseta.Smartbridge.create_tls", - return_value=MockBridge(can_connect=True), - ): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() + await async_setup_integration(hass, MockBridge, config_entry.entry_id) await hass.async_block_till_done() @@ -104,15 +96,10 @@ async def test_humanify_lutron_caseta_button_event_integration_not_loaded( ) config_entry.add_to_hass(hass) - with patch( - "homeassistant.components.lutron_caseta.Smartbridge.create_tls", - return_value=MockBridge(can_connect=True), - ): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() + await async_setup_integration(hass, MockBridge, config_entry.entry_id) - await hass.config_entries.async_unload(config_entry.entry_id) - await hass.async_block_till_done() + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() for device in device_registry.devices.values(): if device.config_entries == {config_entry.entry_id}: diff --git a/tests/components/mastodon/conftest.py b/tests/components/mastodon/conftest.py index ac23141be55..d8979083de9 100644 --- a/tests/components/mastodon/conftest.py +++ b/tests/components/mastodon/conftest.py @@ -3,12 +3,13 @@ from collections.abc import Generator from unittest.mock import AsyncMock, patch +from mastodon.Mastodon import Account, InstanceV2 import pytest from homeassistant.components.mastodon.const import CONF_BASE_URL, DOMAIN from homeassistant.const import 
CONF_ACCESS_TOKEN, CONF_CLIENT_ID, CONF_CLIENT_SECRET -from tests.common import MockConfigEntry, load_json_object_fixture +from tests.common import MockConfigEntry, load_fixture @pytest.fixture @@ -31,9 +32,11 @@ def mock_mastodon_client() -> Generator[AsyncMock]: ) as mock_client, ): client = mock_client.return_value - client.instance.return_value = load_json_object_fixture("instance.json", DOMAIN) - client.account_verify_credentials.return_value = load_json_object_fixture( - "account_verify_credentials.json", DOMAIN + client.instance.return_value = InstanceV2.from_json( + load_fixture("instance.json", DOMAIN) + ) + client.account_verify_credentials.return_value = Account.from_json( + load_fixture("account_verify_credentials.json", DOMAIN) ) client.status_post.return_value = None yield client diff --git a/tests/components/mastodon/fixtures/account_verify_credentials.json b/tests/components/mastodon/fixtures/account_verify_credentials.json index 401caa121ae..7806d280ab9 100644 --- a/tests/components/mastodon/fixtures/account_verify_credentials.json +++ b/tests/components/mastodon/fixtures/account_verify_credentials.json @@ -1,78 +1,60 @@ { - "id": "14715", - "username": "trwnh", - "acct": "trwnh", - "display_name": "infinite love ⴳ", - "locked": false, - "bot": false, - "created_at": "2016-11-24T10:02:12.085Z", - "note": "

i have approximate knowledge of many things. perpetual student. (nb/ace/they) xmpp/email: a@trwnh.com https://trwnh.com help me live: https://liberapay.com/at or https://paypal.me/trwnh - my triggers are moths and glitter - i have all notifs except mentions turned off, so please interact if you wanna be friends! i literally will not notice otherwise - dm me if i did something wrong, so i can improve - purest person on fedi, do not lewd in my presence - #1 ami cole fan account :fatyoshi:
", - "url": "https://mastodon.social/@trwnh", - "avatar": "https://files.mastodon.social/accounts/avatars/000/014/715/original/34aa222f4ae2e0a9.png", - "avatar_static": "https://files.mastodon.social/accounts/avatars/000/014/715/original/34aa222f4ae2e0a9.png", - "header": "https://files.mastodon.social/accounts/headers/000/014/715/original/5c6fc24edb3bb873.jpg", - "header_static": "https://files.mastodon.social/accounts/headers/000/014/715/original/5c6fc24edb3bb873.jpg", - "followers_count": 821, - "following_count": 178, - "statuses_count": 33120, - "last_status_at": "2019-11-24T15:49:42.251Z", - "source": { - "privacy": "public", - "sensitive": false, - "language": "", - "note": "i have approximate knowledge of many things. perpetual student. (nb/ace/they)\r\n\r\nxmpp/email: a@trwnh.com\r\nhttps://trwnh.com\r\nhelp me live: https://liberapay.com/at or https://paypal.me/trwnh\r\n\r\n- my triggers are moths and glitter\r\n- i have all notifs except mentions turned off, so please interact if you wanna be friends! i literally will not notice otherwise\r\n- dm me if i did something wrong, so i can improve\r\n- purest person on fedi, do not lewd in my presence\r\n- #1 ami cole fan account\r\n\r\n:fatyoshi:", + "_mastopy_version": "2.0.0", + "_mastopy_type": "Account", + "_mastopy_data": { + "id": "14715", + "username": "trwnh", + "acct": "trwnh", + "display_name": "infinite love \u2d33", + "discoverable": true, + "group": false, + "locked": false, + "created_at": "2016-11-24T00:00:00+00:00", + "following_count": 328, + "followers_count": 3169, + "statuses_count": 69523, + "note": "

i have approximate knowledge of many things. perpetual student. (nb/ace/they) xmpp/email: a@trwnh.com https://trwnh.com help me live: - https://donate.stripe.com/4gwcPCaMpcQ19RC4gg - https://liberapay.com/trwnh notes: - my triggers are moths and glitter - i have all notifs except mentions turned off, so please interact if you wanna be friends! i literally will not notice otherwise - dm me if i did something wrong, so i can improve - purest person on fedi, do not lewd in my presence
", + "url": "https://mastodon.social/@trwnh", + "uri": "https://mastodon.social/users/trwnh", + "avatar": "https://files.mastodon.social/accounts/avatars/000/014/715/original/051c958388818705.png", + "header": "https://files.mastodon.social/accounts/headers/000/014/715/original/5c6fc24edb3bb873.jpg", + "avatar_static": "https://files.mastodon.social/accounts/avatars/000/014/715/original/051c958388818705.png", + "header_static": "https://files.mastodon.social/accounts/headers/000/014/715/original/5c6fc24edb3bb873.jpg", + "moved_to_account": null, + "suspended": null, + "limited": null, + "bot": true, "fields": [ { "name": "Website", - "value": "https://trwnh.com", + "value": "https://trwnh.com", "verified_at": "2019-08-29T04:14:55.571+00:00" }, { - "name": "Sponsor", - "value": "https://liberapay.com/at", - "verified_at": "2019-11-15T10:06:15.557+00:00" + "name": "Portfolio", + "value": "https://abdullahtarawneh.com", + "verified_at": "2021-02-11T20:34:13.574+00:00" }, { "name": "Fan of:", - "value": "Punk-rock and post-hardcore (Circa Survive, letlive., La Dispute, THE FEVER 333)Manga (Yu-Gi-Oh!, One Piece, JoJo's Bizarre Adventure, Death Note, Shaman King)Platformers and RPGs (Banjo-Kazooie, Boktai, Final Fantasy Crystal Chronicles)", + "value": "Punk-rock and post-hardcore (Circa Survive, letlive., La Dispute, THE FEVER 333)Manga (Yu-Gi-Oh!, One Piece, JoJo's Bizarre Adventure, Death Note, Shaman King)Platformers and RPGs (Banjo-Kazooie, Boktai, Final Fantasy Crystal Chronicles)", "verified_at": null }, { - "name": "Main topics:", - "value": "systemic analysis, design patterns, anticapitalism, info/tech freedom, theory and philosophy, and otherwise being a genuine and decent wholesome poster. i'm just here to hang out and talk to cool people!", + "name": "What to expect:", + "value": "talking about various things i find interesting, and otherwise being a genuine and decent wholesome poster. i'm just here to hang out and talk to cool people! and to spill my thoughts.", "verified_at": null } ], - "follow_requests_count": 0 - }, - "emojis": [ - { - "shortcode": "fatyoshi", - "url": "https://files.mastodon.social/custom_emojis/images/000/023/920/original/e57ecb623faa0dc9.png", - "static_url": "https://files.mastodon.social/custom_emojis/images/000/023/920/static/e57ecb623faa0dc9.png", - "visible_in_picker": true - } - ], - "fields": [ - { - "name": "Website", - "value": "https://trwnh.com", - "verified_at": "2019-08-29T04:14:55.571+00:00" - }, - { - "name": "Sponsor", - "value": "https://liberapay.com/at", - "verified_at": "2019-11-15T10:06:15.557+00:00" - }, - { - "name": "Fan of:", - "value": "Punk-rock and post-hardcore (Circa Survive, letlive., La Dispute, THE FEVER 333)Manga (Yu-Gi-Oh!, One Piece, JoJo's Bizarre Adventure, Death Note, Shaman King)Platformers and RPGs (Banjo-Kazooie, Boktai, Final Fantasy Crystal Chronicles)", - "verified_at": null - }, - { - "name": "Main topics:", - "value": "systemic analysis, design patterns, anticapitalism, info/tech freedom, theory and philosophy, and otherwise being a genuine and decent wholesome poster. 
i'm just here to hang out and talk to cool people!", - "verified_at": null - } - ] + "emojis": [], + "last_status_at": "2025-03-04T00:00:00", + "noindex": false, + "roles": [], + "role": null, + "source": null, + "mute_expires_at": null, + "indexable": false, + "hide_collections": true, + "memorial": null + } } diff --git a/tests/components/mastodon/fixtures/instance.json b/tests/components/mastodon/fixtures/instance.json index b0e904e80ef..2e3dfe2d46d 100644 --- a/tests/components/mastodon/fixtures/instance.json +++ b/tests/components/mastodon/fixtures/instance.json @@ -1,147 +1,18 @@ { - "domain": "mastodon.social", - "title": "Mastodon", - "version": "4.0.0rc1", - "source_url": "https://github.com/mastodon/mastodon", - "description": "The original server operated by the Mastodon gGmbH non-profit", - "usage": { - "users": { - "active_month": 123122 + "_mastopy_version": "2.0.0", + "_mastopy_type": "InstanceV2", + "_mastopy_data": { + "uri": "mastodon.social", + "domain": "mastodon.social", + "title": "Mastodon", + "version": "4.4.0-nightly.2025-02-07", + "source_url": "https://github.com/mastodon/mastodon", + + "description": "The original server operated by the Mastodon gGmbH non-profit", + "usage": { + "users": { + "active_month": 380143 + } } - }, - "thumbnail": { - "url": "https://files.mastodon.social/site_uploads/files/000/000/001/@1x/57c12f441d083cde.png", - "blurhash": "UeKUpFxuo~R%0nW;WCnhF6RjaJt757oJodS$", - "versions": { - "@1x": "https://files.mastodon.social/site_uploads/files/000/000/001/@1x/57c12f441d083cde.png", - "@2x": "https://files.mastodon.social/site_uploads/files/000/000/001/@2x/57c12f441d083cde.png" - } - }, - "languages": ["en"], - "configuration": { - "urls": { - "streaming": "wss://mastodon.social" - }, - "vapid": { - "public_key": "BCkMmVdKDnKYwzVCDC99Iuc9GvId-x7-kKtuHnLgfF98ENiZp_aj-UNthbCdI70DqN1zUVis-x0Wrot2sBagkMc=" - }, - "accounts": { - "max_featured_tags": 10, - "max_pinned_statuses": 4 - }, - "statuses": { - "max_characters": 500, - "max_media_attachments": 4, - "characters_reserved_per_url": 23 - }, - "media_attachments": { - "supported_mime_types": [ - "image/jpeg", - "image/png", - "image/gif", - "image/heic", - "image/heif", - "image/webp", - "video/webm", - "video/mp4", - "video/quicktime", - "video/ogg", - "audio/wave", - "audio/wav", - "audio/x-wav", - "audio/x-pn-wave", - "audio/vnd.wave", - "audio/ogg", - "audio/vorbis", - "audio/mpeg", - "audio/mp3", - "audio/webm", - "audio/flac", - "audio/aac", - "audio/m4a", - "audio/x-m4a", - "audio/mp4", - "audio/3gpp", - "video/x-ms-asf" - ], - "image_size_limit": 10485760, - "image_matrix_limit": 16777216, - "video_size_limit": 41943040, - "video_frame_rate_limit": 60, - "video_matrix_limit": 2304000 - }, - "polls": { - "max_options": 4, - "max_characters_per_option": 50, - "min_expiration": 300, - "max_expiration": 2629746 - }, - "translation": { - "enabled": true - } - }, - "registrations": { - "enabled": false, - "approval_required": false, - "message": null - }, - "contact": { - "email": "staff@mastodon.social", - "account": { - "id": "1", - "username": "Gargron", - "acct": "Gargron", - "display_name": "Eugen 💀", - "locked": false, - "bot": false, - "discoverable": true, - "group": false, - "created_at": "2016-03-16T00:00:00.000Z", - "note": "

Founder, CEO and lead developer @Mastodon, Germany.
", - "url": "https://mastodon.social/@Gargron", - "avatar": "https://files.mastodon.social/accounts/avatars/000/000/001/original/dc4286ceb8fab734.jpg", - "avatar_static": "https://files.mastodon.social/accounts/avatars/000/000/001/original/dc4286ceb8fab734.jpg", - "header": "https://files.mastodon.social/accounts/headers/000/000/001/original/3b91c9965d00888b.jpeg", - "header_static": "https://files.mastodon.social/accounts/headers/000/000/001/original/3b91c9965d00888b.jpeg", - "followers_count": 133026, - "following_count": 311, - "statuses_count": 72605, - "last_status_at": "2022-10-31", - "noindex": false, - "emojis": [], - "fields": [ - { - "name": "Patreon", - "value": "https://www.patreon.com/mastodon", - "verified_at": null - } - ] - } - }, - "rules": [ - { - "id": "1", - "text": "Sexually explicit or violent media must be marked as sensitive when posting" - }, - { - "id": "2", - "text": "No racism, sexism, homophobia, transphobia, xenophobia, or casteism" - }, - { - "id": "3", - "text": "No incitement of violence or promotion of violent ideologies" - }, - { - "id": "4", - "text": "No harassment, dogpiling or doxxing of other users" - }, - { - "id": "5", - "text": "No content illegal in Germany" - }, - { - "id": "7", - "text": "Do not share intentionally false or misleading information" - } - ] + } } diff --git a/tests/components/mastodon/snapshots/test_diagnostics.ambr b/tests/components/mastodon/snapshots/test_diagnostics.ambr index 982ecee7ee2..9198410f066 100644 --- a/tests/components/mastodon/snapshots/test_diagnostics.ambr +++ b/tests/components/mastodon/snapshots/test_diagnostics.ambr @@ -3,245 +3,82 @@ dict({ 'account': dict({ 'acct': 'trwnh', - 'avatar': 'https://files.mastodon.social/accounts/avatars/000/014/715/original/34aa222f4ae2e0a9.png', - 'avatar_static': 'https://files.mastodon.social/accounts/avatars/000/014/715/original/34aa222f4ae2e0a9.png', - 'bot': False, - 'created_at': '2016-11-24T10:02:12.085Z', + 'avatar': 'https://files.mastodon.social/accounts/avatars/000/014/715/original/051c958388818705.png', + 'avatar_static': 'https://files.mastodon.social/accounts/avatars/000/014/715/original/051c958388818705.png', + 'bot': True, + 'created_at': '2016-11-24T00:00:00+00:00', + 'discoverable': True, 'display_name': 'infinite love ⴳ', 'emojis': list([ - dict({ - 'shortcode': 'fatyoshi', - 'static_url': 'https://files.mastodon.social/custom_emojis/images/000/023/920/static/e57ecb623faa0dc9.png', - 'url': 'https://files.mastodon.social/custom_emojis/images/000/023/920/original/e57ecb623faa0dc9.png', - 'visible_in_picker': True, - }), ]), 'fields': list([ dict({ 'name': 'Website', - 'value': 'trwnh.com', + 'value': 'trwnh.com', 'verified_at': '2019-08-29T04:14:55.571+00:00', }), dict({ - 'name': 'Sponsor', - 'value': 'liberapay.com/at', - 'verified_at': '2019-11-15T10:06:15.557+00:00', + 'name': 'Portfolio', + 'value': 'abdullahtarawneh.com', + 'verified_at': '2021-02-11T20:34:13.574+00:00', }), dict({ 'name': 'Fan of:', - 'value': 'Punk-rock and post-hardcore (Circa Survive, letlive., La Dispute, THE FEVER 333)Manga (Yu-Gi-Oh!, One Piece, JoJo's Bizarre Adventure, Death Note, Shaman King)Platformers and RPGs (Banjo-Kazooie, Boktai, Final Fantasy Crystal Chronicles)', + 'value': 'Punk-rock and post-hardcore (Circa Survive, letlive., La Dispute, THE FEVER 333)Manga (Yu-Gi-Oh!, One Piece, JoJo's Bizarre Adventure, Death Note, Shaman King)Platformers and RPGs (Banjo-Kazooie, Boktai, Final Fantasy Crystal Chronicles)', 'verified_at': None, }), dict({ - 'name': 'Main topics:', 
- 'value': 'systemic analysis, design patterns, anticapitalism, info/tech freedom, theory and philosophy, and otherwise being a genuine and decent wholesome poster. i'm just here to hang out and talk to cool people!', + 'name': 'What to expect:', + 'value': 'talking about various things i find interesting, and otherwise being a genuine and decent wholesome poster. i'm just here to hang out and talk to cool people! and to spill my thoughts.', 'verified_at': None, }), ]), - 'followers_count': 821, - 'following_count': 178, + 'followers_count': 3169, + 'following_count': 328, + 'group': False, 'header': 'https://files.mastodon.social/accounts/headers/000/014/715/original/5c6fc24edb3bb873.jpg', 'header_static': 'https://files.mastodon.social/accounts/headers/000/014/715/original/5c6fc24edb3bb873.jpg', + 'hide_collections': True, 'id': '14715', - 'last_status_at': '2019-11-24T15:49:42.251Z', + 'indexable': False, + 'last_status_at': '2025-03-04T00:00:00', + 'limited': None, 'locked': False, - 'note': '

i have approximate knowledge of many things. perpetual student. (nb/ace/they) xmpp/email: a@trwnh.com trwnh.com help me live: liberapay.com/at or paypal.me/trwnh - my triggers are moths and glitter - i have all notifs except mentions turned off, so please interact if you wanna be friends! i literally will not notice otherwise - dm me if i did something wrong, so i can improve - purest person on fedi, do not lewd in my presence - #1 ami cole fan account :fatyoshi:
', - 'source': dict({ - 'fields': list([ - dict({ - 'name': 'Website', - 'value': 'https://trwnh.com', - 'verified_at': '2019-08-29T04:14:55.571+00:00', - }), - dict({ - 'name': 'Sponsor', - 'value': 'https://liberapay.com/at', - 'verified_at': '2019-11-15T10:06:15.557+00:00', - }), - dict({ - 'name': 'Fan of:', - 'value': "Punk-rock and post-hardcore (Circa Survive, letlive., La Dispute, THE FEVER 333)Manga (Yu-Gi-Oh!, One Piece, JoJo's Bizarre Adventure, Death Note, Shaman King)Platformers and RPGs (Banjo-Kazooie, Boktai, Final Fantasy Crystal Chronicles)", - 'verified_at': None, - }), - dict({ - 'name': 'Main topics:', - 'value': "systemic analysis, design patterns, anticapitalism, info/tech freedom, theory and philosophy, and otherwise being a genuine and decent wholesome poster. i'm just here to hang out and talk to cool people!", - 'verified_at': None, - }), - ]), - 'follow_requests_count': 0, - 'language': '', - 'note': ''' - i have approximate knowledge of many things. perpetual student. (nb/ace/they) - - xmpp/email: a@trwnh.com - https://trwnh.com - help me live: https://liberapay.com/at or https://paypal.me/trwnh - - - my triggers are moths and glitter - - i have all notifs except mentions turned off, so please interact if you wanna be friends! i literally will not notice otherwise - - dm me if i did something wrong, so i can improve - - purest person on fedi, do not lewd in my presence - - #1 ami cole fan account - - :fatyoshi: - ''', - 'privacy': 'public', - 'sensitive': False, - }), - 'statuses_count': 33120, + 'memorial': None, + 'moved_to_account': None, + 'mute_expires_at': None, + 'noindex': False, + 'note': '

i have approximate knowledge of many things. perpetual student. (nb/ace/they) xmpp/email: a@trwnh.com trwnh.com help me live: - donate.stripe.com/4gwcPCaMpcQ1 - liberapay.com/trwnh notes: - my triggers are moths and glitter - i have all notifs except mentions turned off, so please interact if you wanna be friends! i literally will not notice otherwise - dm me if i did something wrong, so i can improve - purest person on fedi, do not lewd in my presence
', + 'role': None, + 'roles': list([ + ]), + 'source': None, + 'statuses_count': 69523, + 'suspended': None, + 'uri': 'https://mastodon.social/users/trwnh', 'url': 'https://mastodon.social/@trwnh', 'username': 'trwnh', }), 'instance': dict({ - 'configuration': dict({ - 'accounts': dict({ - 'max_featured_tags': 10, - 'max_pinned_statuses': 4, - }), - 'media_attachments': dict({ - 'image_matrix_limit': 16777216, - 'image_size_limit': 10485760, - 'supported_mime_types': list([ - 'image/jpeg', - 'image/png', - 'image/gif', - 'image/heic', - 'image/heif', - 'image/webp', - 'video/webm', - 'video/mp4', - 'video/quicktime', - 'video/ogg', - 'audio/wave', - 'audio/wav', - 'audio/x-wav', - 'audio/x-pn-wave', - 'audio/vnd.wave', - 'audio/ogg', - 'audio/vorbis', - 'audio/mpeg', - 'audio/mp3', - 'audio/webm', - 'audio/flac', - 'audio/aac', - 'audio/m4a', - 'audio/x-m4a', - 'audio/mp4', - 'audio/3gpp', - 'video/x-ms-asf', - ]), - 'video_frame_rate_limit': 60, - 'video_matrix_limit': 2304000, - 'video_size_limit': 41943040, - }), - 'polls': dict({ - 'max_characters_per_option': 50, - 'max_expiration': 2629746, - 'max_options': 4, - 'min_expiration': 300, - }), - 'statuses': dict({ - 'characters_reserved_per_url': 23, - 'max_characters': 500, - 'max_media_attachments': 4, - }), - 'translation': dict({ - 'enabled': True, - }), - 'urls': dict({ - 'streaming': 'wss://mastodon.social', - }), - 'vapid': dict({ - 'public_key': 'BCkMmVdKDnKYwzVCDC99Iuc9GvId-x7-kKtuHnLgfF98ENiZp_aj-UNthbCdI70DqN1zUVis-x0Wrot2sBagkMc=', - }), - }), - 'contact': dict({ - 'account': dict({ - 'acct': 'Gargron', - 'avatar': 'https://files.mastodon.social/accounts/avatars/000/000/001/original/dc4286ceb8fab734.jpg', - 'avatar_static': 'https://files.mastodon.social/accounts/avatars/000/000/001/original/dc4286ceb8fab734.jpg', - 'bot': False, - 'created_at': '2016-03-16T00:00:00.000Z', - 'discoverable': True, - 'display_name': 'Eugen 💀', - 'emojis': list([ - ]), - 'fields': list([ - dict({ - 'name': 'Patreon', - 'value': 'patreon.com/mastodon', - 'verified_at': None, - }), - ]), - 'followers_count': 133026, - 'following_count': 311, - 'group': False, - 'header': 'https://files.mastodon.social/accounts/headers/000/000/001/original/3b91c9965d00888b.jpeg', - 'header_static': 'https://files.mastodon.social/accounts/headers/000/000/001/original/3b91c9965d00888b.jpeg', - 'id': '1', - 'last_status_at': '2022-10-31', - 'locked': False, - 'noindex': False, - 'note': '

Founder, CEO and lead developer @Mastodon, Germany.

', - 'statuses_count': 72605, - 'url': 'https://mastodon.social/@Gargron', - 'username': 'Gargron', - }), - 'email': 'staff@mastodon.social', - }), + 'api_versions': None, + 'configuration': None, + 'contact': None, 'description': 'The original server operated by the Mastodon gGmbH non-profit', 'domain': 'mastodon.social', - 'languages': list([ - 'en', - ]), - 'registrations': dict({ - 'approval_required': False, - 'enabled': False, - 'message': None, - }), - 'rules': list([ - dict({ - 'id': '1', - 'text': 'Sexually explicit or violent media must be marked as sensitive when posting', - }), - dict({ - 'id': '2', - 'text': 'No racism, sexism, homophobia, transphobia, xenophobia, or casteism', - }), - dict({ - 'id': '3', - 'text': 'No incitement of violence or promotion of violent ideologies', - }), - dict({ - 'id': '4', - 'text': 'No harassment, dogpiling or doxxing of other users', - }), - dict({ - 'id': '5', - 'text': 'No content illegal in Germany', - }), - dict({ - 'id': '7', - 'text': 'Do not share intentionally false or misleading information', - }), - ]), + 'icon': None, + 'languages': None, + 'registrations': None, + 'rules': None, 'source_url': 'https://github.com/mastodon/mastodon', - 'thumbnail': dict({ - 'blurhash': 'UeKUpFxuo~R%0nW;WCnhF6RjaJt757oJodS$', - 'url': 'https://files.mastodon.social/site_uploads/files/000/000/001/@1x/57c12f441d083cde.png', - 'versions': dict({ - '@1x': 'https://files.mastodon.social/site_uploads/files/000/000/001/@1x/57c12f441d083cde.png', - '@2x': 'https://files.mastodon.social/site_uploads/files/000/000/001/@2x/57c12f441d083cde.png', - }), - }), + 'thumbnail': None, 'title': 'Mastodon', + 'uri': 'mastodon.social', 'usage': dict({ 'users': dict({ - 'active_month': 123122, + 'active_month': 380143, }), }), - 'version': '4.0.0rc1', + 'version': '4.4.0-nightly.2025-02-07', }), }) # --- diff --git a/tests/components/mastodon/snapshots/test_init.ambr b/tests/components/mastodon/snapshots/test_init.ambr index 28157b9e6eb..46fb4c1d4e0 100644 --- a/tests/components/mastodon/snapshots/test_init.ambr +++ b/tests/components/mastodon/snapshots/test_init.ambr @@ -28,7 +28,7 @@ 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '4.0.0rc1', + 'sw_version': '4.4.0-nightly.2025-02-07', 'via_device_id': None, }) # --- diff --git a/tests/components/mastodon/snapshots/test_sensor.ambr b/tests/components/mastodon/snapshots/test_sensor.ambr index 22ac2671c36..40986210454 100644 --- a/tests/components/mastodon/snapshots/test_sensor.ambr +++ b/tests/components/mastodon/snapshots/test_sensor.ambr @@ -47,7 +47,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '821', + 'state': '3169', }) # --- # name: test_sensors[sensor.mastodon_trwnh_mastodon_social_following-entry] @@ -98,7 +98,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '178', + 'state': '328', }) # --- # name: test_sensors[sensor.mastodon_trwnh_mastodon_social_posts-entry] @@ -149,6 +149,6 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '33120', + 'state': '69523', }) # --- diff --git a/tests/components/mastodon/test_services.py b/tests/components/mastodon/test_services.py index 4dafa9a8e5b..f51d39f8687 100644 --- a/tests/components/mastodon/test_services.py +++ b/tests/components/mastodon/test_services.py @@ -2,7 +2,7 @@ from unittest.mock import AsyncMock, Mock, patch -from mastodon.Mastodon import MastodonAPIError +from mastodon.Mastodon import MastodonAPIError, MediaAttachment import pytest from 
homeassistant.components.mastodon.const import ( @@ -106,7 +106,9 @@ async def test_service_post( with ( patch.object(hass.config, "is_allowed_path", return_value=True), - patch.object(mock_mastodon_client, "media_post", return_value={"id": "1"}), + patch.object( + mock_mastodon_client, "media_post", return_value=MediaAttachment(id="1") + ), ): await hass.services.async_call( DOMAIN, @@ -163,7 +165,7 @@ async def test_post_service_failed( await hass.async_block_till_done() hass.config.is_allowed_path = Mock(return_value=True) - mock_mastodon_client.media_post.return_value = {"id": "1"} + mock_mastodon_client.media_post.return_value = MediaAttachment(id="1") mock_mastodon_client.status_post.side_effect = MastodonAPIError diff --git a/tests/components/matter/snapshots/test_number.ambr b/tests/components/matter/snapshots/test_number.ambr index dc35f6f2a69..d777b9d48d0 100644 --- a/tests/components/matter/snapshots/test_number.ambr +++ b/tests/components/matter/snapshots/test_number.ambr @@ -401,8 +401,8 @@ }), 'area_id': None, 'capabilities': dict({ - 'max': 25, - 'min': -25, + 'max': 50, + 'min': -50, 'mode': , 'step': 0.5, }), @@ -439,8 +439,8 @@ 'attributes': ReadOnlyDict({ 'device_class': 'temperature', 'friendly_name': 'Eve Thermo Temperature offset', - 'max': 25, - 'min': -25, + 'max': 50, + 'min': -50, 'mode': , 'step': 0.5, 'unit_of_measurement': , diff --git a/tests/components/mcp_server/conftest.py b/tests/components/mcp_server/conftest.py index 5ec67fb6ce3..b5e25d9fe50 100644 --- a/tests/components/mcp_server/conftest.py +++ b/tests/components/mcp_server/conftest.py @@ -5,9 +5,10 @@ from unittest.mock import AsyncMock, patch import pytest -from homeassistant.components.mcp_server.const import DOMAIN, LLM_API +from homeassistant.components.mcp_server.const import DOMAIN from homeassistant.const import CONF_LLM_HASS_API from homeassistant.core import HomeAssistant +from homeassistant.helpers import llm from tests.common import MockConfigEntry @@ -21,13 +22,19 @@ def mock_setup_entry() -> Generator[AsyncMock]: yield mock_setup_entry +@pytest.fixture(name="llm_hass_api") +def llm_hass_api_fixture() -> str: + """Fixture for the config entry llm_hass_api.""" + return llm.LLM_API_ASSIST + + @pytest.fixture(name="config_entry") -def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: +def mock_config_entry(hass: HomeAssistant, llm_hass_api: str) -> MockConfigEntry: """Fixture to load the integration.""" config_entry = MockConfigEntry( domain=DOMAIN, data={ - CONF_LLM_HASS_API: LLM_API, + CONF_LLM_HASS_API: llm_hass_api, }, ) config_entry.add_to_hass(hass) diff --git a/tests/components/mcp_server/test_http.py b/tests/components/mcp_server/test_http.py index 905bfaa11d7..70efd211b57 100644 --- a/tests/components/mcp_server/test_http.py +++ b/tests/components/mcp_server/test_http.py @@ -16,6 +16,7 @@ import pytest from homeassistant.components.conversation import DOMAIN as CONVERSATION_DOMAIN from homeassistant.components.homeassistant.exposed_entities import async_expose_entity from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN +from homeassistant.components.mcp_server.const import STATELESS_LLM_API from homeassistant.components.mcp_server.http import MESSAGES_API, SSE_API from homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_LLM_HASS_API, STATE_OFF, STATE_ON @@ -24,6 +25,7 @@ from homeassistant.helpers import ( area_registry as ar, device_registry as dr, entity_registry as er, + llm, ) from homeassistant.setup import 
async_setup_component @@ -297,6 +299,7 @@ async def mcp_session( yield session +@pytest.mark.parametrize("llm_hass_api", [llm.LLM_API_ASSIST, STATELESS_LLM_API]) async def test_mcp_tools_list( hass: HomeAssistant, setup_integration: None, @@ -319,6 +322,7 @@ async def test_mcp_tools_list( assert properties.get("name") == {"type": "string"} +@pytest.mark.parametrize("llm_hass_api", [llm.LLM_API_ASSIST, STATELESS_LLM_API]) async def test_mcp_tool_call( hass: HomeAssistant, setup_integration: None, @@ -371,6 +375,7 @@ async def test_mcp_tool_call_failed( assert "Error calling tool" in result.content[0].text +@pytest.mark.parametrize("llm_hass_api", [llm.LLM_API_ASSIST, STATELESS_LLM_API]) async def test_prompt_list( hass: HomeAssistant, setup_integration: None, @@ -384,13 +389,11 @@ async def test_prompt_list( assert len(result.prompts) == 1 prompt = result.prompts[0] - assert prompt.name == "Stateless Assist" - assert ( - prompt.description - == "Default prompt for the Home Assistant LLM API Stateless Assist" - ) + assert prompt.name == "Assist" + assert prompt.description == "Default prompt for Home Assistant Assist API" +@pytest.mark.parametrize("llm_hass_api", [llm.LLM_API_ASSIST, STATELESS_LLM_API]) async def test_prompt_get( hass: HomeAssistant, setup_integration: None, @@ -400,12 +403,9 @@ async def test_prompt_get( """Test the get prompt endpoint.""" async with mcp_session(mcp_sse_url, hass_supervisor_access_token) as session: - result = await session.get_prompt(name="Stateless Assist") + result = await session.get_prompt(name="Assist") - assert ( - result.description - == "Default prompt for the Home Assistant LLM API Stateless Assist" - ) + assert result.description == "Default prompt for Home Assistant Assist API" assert len(result.messages) == 1 assert result.messages[0].role == "assistant" assert result.messages[0].content.type == "text" diff --git a/tests/components/media_player/test_async_helpers.py b/tests/components/media_player/test_async_helpers.py index 680603c097d..3ab79db73e1 100644 --- a/tests/components/media_player/test_async_helpers.py +++ b/tests/components/media_player/test_async_helpers.py @@ -69,6 +69,10 @@ class SimpleMediaPlayer(mp.MediaPlayerEntity): """Put device in standby.""" self._state = STATE_STANDBY + def idle(self): + """Put device in idle.""" + self._state = STATE_IDLE + class ExtendedMediaPlayer(SimpleMediaPlayer): """Media player test class.""" @@ -92,7 +96,7 @@ class ExtendedMediaPlayer(SimpleMediaPlayer): def toggle(self): """Toggle the power on the media player.""" - if self._state in [STATE_OFF, STATE_IDLE, STATE_STANDBY]: + if self._state in [STATE_OFF, STATE_STANDBY]: self._state = STATE_ON else: self._state = STATE_OFF @@ -187,3 +191,7 @@ async def test_toggle(player) -> None: assert player.state == STATE_STANDBY await player.async_toggle() assert player.state == STATE_ON + player.idle() + assert player.state == STATE_IDLE + await player.async_toggle() + assert player.state == STATE_OFF diff --git a/tests/components/meteo_france/conftest.py b/tests/components/meteo_france/conftest.py index eb28ec0a838..82b220e331e 100644 --- a/tests/components/meteo_france/conftest.py +++ b/tests/components/meteo_france/conftest.py @@ -24,8 +24,8 @@ def patch_requests(): mock_data.get_rain.return_value = Rain( load_json_object_fixture("raw_rain.json", DOMAIN) ) - mock_data.get_warning_current_phenomenoms.return_value = CurrentPhenomenons( - load_json_object_fixture("raw_warning_current_phenomenoms.json", DOMAIN) + 
mock_data.get_warning_current_phenomenons.return_value = CurrentPhenomenons( + load_json_object_fixture("raw_warning_current_phenomenons.json", DOMAIN) ) yield mock_data diff --git a/tests/components/meteo_france/fixtures/raw_warning_current_phenomenoms.json b/tests/components/meteo_france/fixtures/raw_warning_current_phenomenons.json similarity index 100% rename from tests/components/meteo_france/fixtures/raw_warning_current_phenomenoms.json rename to tests/components/meteo_france/fixtures/raw_warning_current_phenomenons.json diff --git a/tests/components/modbus/test_climate.py b/tests/components/modbus/test_climate.py index 3c30efe9dce..54d4c5f6666 100644 --- a/tests/components/modbus/test_climate.py +++ b/tests/components/modbus/test_climate.py @@ -5,6 +5,7 @@ import pytest from homeassistant.components.climate import ( ATTR_FAN_MODE, ATTR_FAN_MODES, + ATTR_HVAC_ACTION, ATTR_HVAC_MODE, ATTR_HVAC_MODES, ATTR_SWING_MODE, @@ -31,6 +32,7 @@ from homeassistant.components.climate import ( SWING_OFF, SWING_ON, SWING_VERTICAL, + HVACAction, HVACMode, ) from homeassistant.components.homeassistant import SERVICE_UPDATE_ENTITY @@ -47,6 +49,16 @@ from homeassistant.components.modbus.const import ( CONF_FAN_MODE_REGISTER, CONF_FAN_MODE_TOP, CONF_FAN_MODE_VALUES, + CONF_HVAC_ACTION_COOLING, + CONF_HVAC_ACTION_DEFROSTING, + CONF_HVAC_ACTION_DRYING, + CONF_HVAC_ACTION_FAN, + CONF_HVAC_ACTION_HEATING, + CONF_HVAC_ACTION_IDLE, + CONF_HVAC_ACTION_OFF, + CONF_HVAC_ACTION_PREHEATING, + CONF_HVAC_ACTION_REGISTER, + CONF_HVAC_ACTION_VALUES, CONF_HVAC_MODE_AUTO, CONF_HVAC_MODE_COOL, CONF_HVAC_MODE_DRY, @@ -224,6 +236,43 @@ ENTITY_ID = f"{CLIMATE_DOMAIN}.{TEST_ENTITY_NAME}".replace(" ", "_") } ], }, + { + CONF_CLIMATES: [ + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_TARGET_TEMP: 117, + CONF_ADDRESS: 117, + CONF_SLAVE: 10, + CONF_HVAC_ONOFF_REGISTER: 12, + CONF_HVAC_MODE_REGISTER: { + CONF_ADDRESS: 11, + CONF_WRITE_REGISTERS: True, + CONF_HVAC_MODE_VALUES: { + CONF_HVAC_MODE_OFF: 0, + CONF_HVAC_MODE_HEAT: 1, + CONF_HVAC_MODE_COOL: 2, + CONF_HVAC_MODE_HEAT_COOL: 3, + CONF_HVAC_MODE_DRY: 4, + CONF_HVAC_MODE_FAN_ONLY: 5, + CONF_HVAC_MODE_AUTO: 6, + }, + }, + CONF_HVAC_ACTION_REGISTER: { + CONF_ADDRESS: 14, + CONF_HVAC_ACTION_VALUES: { + CONF_HVAC_ACTION_COOLING: 0, + CONF_HVAC_ACTION_DEFROSTING: 1, + CONF_HVAC_ACTION_DRYING: 2, + CONF_HVAC_ACTION_FAN: 3, + CONF_HVAC_ACTION_HEATING: 4, + CONF_HVAC_ACTION_IDLE: 5, + CONF_HVAC_ACTION_OFF: 6, + CONF_HVAC_ACTION_PREHEATING: 7, + }, + }, + } + ], + }, ], ) async def test_config_climate(hass: HomeAssistant, mock_modbus) -> None: @@ -745,6 +794,95 @@ async def test_hvac_onoff_coil_update( assert state.state == result +@pytest.mark.parametrize( + ("do_config", "result", "register_words"), + [ + ( + { + CONF_CLIMATES: [ + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_TARGET_TEMP: 116, + CONF_ADDRESS: 117, + CONF_SLAVE: 10, + CONF_SCAN_INTERVAL: 0, + CONF_DATA_TYPE: DataType.INT32, + CONF_HVAC_ACTION_REGISTER: { + CONF_ADDRESS: 118, + CONF_HVAC_ACTION_VALUES: { + CONF_HVAC_ACTION_IDLE: 0, + CONF_HVAC_ACTION_HEATING: 1, + }, + }, + }, + ] + }, + HVACAction.HEATING, + [0x01], + ), + ( + { + CONF_CLIMATES: [ + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_TARGET_TEMP: 116, + CONF_ADDRESS: 117, + CONF_SLAVE: 10, + CONF_SCAN_INTERVAL: 0, + CONF_DATA_TYPE: DataType.INT32, + CONF_HVAC_ACTION_REGISTER: { + CONF_ADDRESS: 118, + CONF_HVAC_ACTION_VALUES: { + CONF_HVAC_ACTION_COOLING: 0, + CONF_HVAC_ACTION_HEATING: 1, + }, + }, + }, + ] + }, + HVACAction.COOLING, + [0x00], + ), + ( + { + CONF_CLIMATES: 
[ + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_TARGET_TEMP: 116, + CONF_ADDRESS: 117, + CONF_SLAVE: 10, + CONF_SCAN_INTERVAL: 0, + CONF_DATA_TYPE: DataType.INT32, + CONF_HVAC_ACTION_REGISTER: { + CONF_ADDRESS: 118, + CONF_HVAC_ACTION_VALUES: { + CONF_HVAC_ACTION_OFF: 0, + CONF_HVAC_ACTION_DRYING: 1, + }, + }, + }, + ] + }, + HVACAction.DRYING, + [0x01], + ), + ], +) +async def test_service_climate_action_update( + hass: HomeAssistant, mock_modbus_ha, result, register_words +) -> None: + """Test HVAC action updates.""" + mock_modbus_ha.read_holding_registers.return_value = ReadResult(register_words) + await hass.services.async_call( + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, + ) + await hass.async_block_till_done() + assert hass.states.get(ENTITY_ID).attributes[ATTR_HVAC_ACTION] == result + + @pytest.mark.parametrize( ("do_config", "result", "register_words"), [ diff --git a/tests/components/moehlenhoff_alpha2/__init__.py b/tests/components/moehlenhoff_alpha2/__init__.py index 50087794560..90d6d88fedc 100644 --- a/tests/components/moehlenhoff_alpha2/__init__.py +++ b/tests/components/moehlenhoff_alpha2/__init__.py @@ -19,7 +19,7 @@ async def mock_update_data(self): for _type in ("HEATAREA", "HEATCTRL", "IODEVICE"): if not isinstance(data["Devices"]["Device"][_type], list): data["Devices"]["Device"][_type] = [data["Devices"]["Device"][_type]] - self.static_data = data + self._static_data = data async def init_integration(hass: HomeAssistant) -> MockConfigEntry: diff --git a/tests/components/mqtt/test_common.py b/tests/components/mqtt/common.py similarity index 91% rename from tests/components/mqtt/test_common.py rename to tests/components/mqtt/common.py index 3bb8657e2f2..e4a368f0d71 100644 --- a/tests/components/mqtt/test_common.py +++ b/tests/components/mqtt/common.py @@ -66,6 +66,212 @@ DEFAULT_CONFIG_DEVICE_INFO_MAC = { "configuration_url": "http://example.com", } +MOCK_SUBENTRY_NOTIFY_COMPONENT1 = { + "363a7ecad6be4a19b939a016ea93e994": { + "platform": "notify", + "name": "Milkman alert", + "command_topic": "test-topic", + "command_template": "{{ value }}", + "entity_picture": "https://example.com/363a7ecad6be4a19b939a016ea93e994", + "retain": False, + }, +} +MOCK_SUBENTRY_NOTIFY_COMPONENT2 = { + "6494827dac294fa0827c54b02459d309": { + "platform": "notify", + "name": "The second notifier", + "command_topic": "test-topic2", + "entity_picture": "https://example.com/6494827dac294fa0827c54b02459d309", + }, +} +MOCK_SUBENTRY_NOTIFY_COMPONENT_NO_NAME = { + "5269352dd9534c908d22812ea5d714cd": { + "platform": "notify", + "command_topic": "test-topic", + "command_template": "{{ value }}", + "entity_picture": "https://example.com/5269352dd9534c908d22812ea5d714cd", + "retain": False, + }, +} + +MOCK_SUBENTRY_SENSOR_COMPONENT = { + "e9261f6feed443e7b7d5f3fbe2a47412": { + "platform": "sensor", + "name": "Energy", + "device_class": "enum", + "state_topic": "test-topic", + "options": ["low", "medium", "high"], + "expire_after": 30, + "value_template": "{{ value_json.value }}", + "entity_picture": "https://example.com/e9261f6feed443e7b7d5f3fbe2a47412", + }, +} +MOCK_SUBENTRY_SENSOR_COMPONENT_STATE_CLASS = { + "a0f85790a95d4889924602effff06b6e": { + "platform": "sensor", + "name": "Energy", + "state_class": "measurement", + "state_topic": "test-topic", + "entity_picture": "https://example.com/a0f85790a95d4889924602effff06b6e", + }, +} +MOCK_SUBENTRY_SENSOR_COMPONENT_LAST_RESET = { + "e9261f6feed443e7b7d5f3fbe2a47412": { + "platform": "sensor", + "name": 
"Energy", + "state_class": "total", + "last_reset_value_template": "{{ value_json.value }}", + "state_topic": "test-topic", + "entity_picture": "https://example.com/e9261f6feed443e7b7d5f3fbe2a47412", + }, +} +MOCK_SUBENTRY_SWITCH_COMPONENT = { + "3faf1318016c46c5aea26707eeb6f12e": { + "platform": "switch", + "name": "Outlet", + "device_class": "outlet", + "command_topic": "test-topic", + "state_topic": "test-topic", + "command_template": "{{ value }}", + "value_template": "{{ value_json.value }}", + "entity_picture": "https://example.com/3faf1318016c46c5aea26707eeb6f12e", + "optimistic": True, + }, +} + +# Bogus light component just for code coverage +# Note that light cannot be setup through the UI yet +# The test is for code coverage +MOCK_SUBENTRY_LIGHT_COMPONENT = { + "8131babc5e8d4f44b82e0761d39091a2": { + "platform": "light", + "name": "Test light", + "command_topic": "test-topic4", + "schema": "basic", + "entity_picture": "https://example.com/8131babc5e8d4f44b82e0761d39091a2", + }, +} +MOCK_SUBENTRY_NOTIFY_BAD_SCHEMA = { + "b10b531e15244425a74bb0abb1e9d2c6": { + "platform": "notify", + "name": "Test", + "command_topic": "bad#topic", + }, +} + +MOCK_SUBENTRY_AVAILABILITY_DATA = { + "availability": { + "availability_topic": "test/availability", + "availability_template": "{{ value_json.availability }}", + "payload_available": "online", + "payload_not_available": "offline", + } +} + +MOCK_NOTIFY_SUBENTRY_DATA_MULTI = { + "device": { + "name": "Milk notifier", + "sw_version": "1.0", + "hw_version": "2.1 rev a", + "model": "Model XL", + "model_id": "mn002", + "configuration_url": "https://example.com", + }, + "components": MOCK_SUBENTRY_NOTIFY_COMPONENT1 | MOCK_SUBENTRY_NOTIFY_COMPONENT2, +} | MOCK_SUBENTRY_AVAILABILITY_DATA + +MOCK_NOTIFY_SUBENTRY_DATA_SINGLE = { + "device": { + "name": "Milk notifier", + "sw_version": "1.0", + "hw_version": "2.1 rev a", + "model": "Model XL", + "model_id": "mn002", + "configuration_url": "https://example.com", + "mqtt_settings": {"qos": 1}, + }, + "components": MOCK_SUBENTRY_NOTIFY_COMPONENT1, +} +MOCK_NOTIFY_SUBENTRY_DATA_NO_NAME = { + "device": { + "name": "Milk notifier", + "sw_version": "1.0", + "hw_version": "2.1 rev a", + "model": "Model XL", + "model_id": "mn002", + "configuration_url": "https://example.com", + }, + "components": MOCK_SUBENTRY_NOTIFY_COMPONENT_NO_NAME, +} +MOCK_SENSOR_SUBENTRY_DATA_SINGLE = { + "device": { + "name": "Test sensor", + "sw_version": "1.0", + "hw_version": "2.1 rev a", + "model": "Model XL", + "model_id": "mn002", + "configuration_url": "https://example.com", + }, + "components": MOCK_SUBENTRY_SENSOR_COMPONENT, +} +MOCK_SENSOR_SUBENTRY_DATA_SINGLE_STATE_CLASS = { + "device": { + "name": "Test sensor", + "sw_version": "1.0", + "hw_version": "2.1 rev a", + "model": "Model XL", + "model_id": "mn002", + "configuration_url": "https://example.com", + }, + "components": MOCK_SUBENTRY_SENSOR_COMPONENT_STATE_CLASS, +} +MOCK_SENSOR_SUBENTRY_DATA_SINGLE_LAST_RESET_TEMPLATE = { + "device": { + "name": "Test sensor", + "sw_version": "1.0", + "hw_version": "2.1 rev a", + "model": "Model XL", + "model_id": "mn002", + "configuration_url": "https://example.com", + }, + "components": MOCK_SUBENTRY_SENSOR_COMPONENT_LAST_RESET, +} +MOCK_SWITCH_SUBENTRY_DATA_SINGLE_STATE_CLASS = { + "device": { + "name": "Test switch", + "sw_version": "1.0", + "hw_version": "2.1 rev a", + "model": "Model XL", + "model_id": "mn002", + "configuration_url": "https://example.com", + }, + "components": MOCK_SUBENTRY_SWITCH_COMPONENT, +} 
+MOCK_SUBENTRY_DATA_BAD_COMPONENT_SCHEMA = { + "device": { + "name": "Milk notifier", + "sw_version": "1.0", + "hw_version": "2.1 rev a", + "model": "Model XL", + "model_id": "mn002", + "configuration_url": "https://example.com", + }, + "components": MOCK_SUBENTRY_NOTIFY_BAD_SCHEMA, +} +MOCK_SUBENTRY_DATA_SET_MIX = { + "device": { + "name": "Milk notifier", + "sw_version": "1.0", + "hw_version": "2.1 rev a", + "model": "Model XL", + "model_id": "mn002", + "configuration_url": "https://example.com", + }, + "components": MOCK_SUBENTRY_NOTIFY_COMPONENT1 + | MOCK_SUBENTRY_NOTIFY_COMPONENT2 + | MOCK_SUBENTRY_LIGHT_COMPONENT + | MOCK_SUBENTRY_SWITCH_COMPONENT, +} | MOCK_SUBENTRY_AVAILABILITY_DATA _SENTINEL = object() DISCOVERY_COUNT = sum(len(discovery_topic) for discovery_topic in MQTT.values()) diff --git a/tests/components/mqtt/test_alarm_control_panel.py b/tests/components/mqtt/test_alarm_control_panel.py index b46829650f6..9241106496b 100644 --- a/tests/components/mqtt/test_alarm_control_panel.py +++ b/tests/components/mqtt/test_alarm_control_panel.py @@ -33,7 +33,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_binary_sensor.py b/tests/components/mqtt/test_binary_sensor.py index 8809f2201f2..169e1ab4c6b 100644 --- a/tests/components/mqtt/test_binary_sensor.py +++ b/tests/components/mqtt/test_binary_sensor.py @@ -23,7 +23,7 @@ from homeassistant.core import HomeAssistant, State, callback from homeassistant.helpers.typing import ConfigType from homeassistant.util import dt as dt_util -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_button.py b/tests/components/mqtt/test_button.py index f147b33c88b..f99c48a440f 100644 --- a/tests/components/mqtt/test_button.py +++ b/tests/components/mqtt/test_button.py @@ -10,7 +10,7 @@ from homeassistant.components import button, mqtt from homeassistant.const import ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, STATE_UNKNOWN from homeassistant.core import HomeAssistant -from .test_common import ( +from .common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, diff --git a/tests/components/mqtt/test_camera.py b/tests/components/mqtt/test_camera.py index cda536dc19e..b5971adcb92 100644 --- a/tests/components/mqtt/test_camera.py +++ b/tests/components/mqtt/test_camera.py @@ -11,7 +11,7 @@ from homeassistant.components import camera, mqtt from homeassistant.components.mqtt.camera import MQTT_CAMERA_ATTRIBUTES_BLOCKED from homeassistant.core import HomeAssistant -from .test_common import ( +from .common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, diff --git a/tests/components/mqtt/test_client.py b/tests/components/mqtt/test_client.py index 9d5401fd437..c2cce3d1344 100644 --- a/tests/components/mqtt/test_client.py +++ b/tests/components/mqtt/test_client.py @@ -27,8 +27,8 @@ from homeassistant.core import CALLBACK_TYPE, CoreState, HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.util.dt import utcnow +from .common 
import help_all_subscribe_calls from .conftest import ENTRY_DEFAULT_BIRTH_MESSAGE -from .test_common import help_all_subscribe_calls from tests.common import ( MockConfigEntry, @@ -1556,6 +1556,42 @@ async def test_setup_uses_certificate_on_certificate_set_to_auto_and_insecure( assert insecure_check["insecure"] == insecure_param +@pytest.mark.parametrize( + ("mqtt_config_entry_data", "client_id"), + [ + ( + { + mqtt.CONF_BROKER: "mock-broker", + "client_id": "random01234random0124", + }, + "random01234random0124", + ), + ( + { + mqtt.CONF_BROKER: "mock-broker", + }, + None, + ), + ], +) +async def test_client_id_is_set( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + client_id: str | None, +) -> None: + """Test setup defaults for tls.""" + with patch( + "homeassistant.components.mqtt.async_client.AsyncMQTTClient" + ) as async_client_mock: + await mqtt_mock_entry() + await hass.async_block_till_done() + assert async_client_mock.call_count == 1 + call_params: dict[str, Any] = async_client_mock.call_args[1] + assert "client_id" in call_params + assert client_id is None or client_id == call_params["client_id"] + assert call_params["client_id"] is not None + + @pytest.mark.parametrize( "mqtt_config_entry_data", [ diff --git a/tests/components/mqtt/test_climate.py b/tests/components/mqtt/test_climate.py index 3760b0226f5..fd0b95f2b13 100644 --- a/tests/components/mqtt/test_climate.py +++ b/tests/components/mqtt/test_climate.py @@ -33,11 +33,16 @@ from homeassistant.components.mqtt.climate import ( MQTT_CLIMATE_ATTRIBUTES_BLOCKED, VALUE_TEMPLATE_KEYS, ) -from homeassistant.const import ATTR_TEMPERATURE, STATE_UNKNOWN, UnitOfTemperature +from homeassistant.const import ATTR_TEMPERATURE, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError +from homeassistant.util.unit_system import ( + METRIC_SYSTEM, + US_CUSTOMARY_SYSTEM, + UnitSystem, +) -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, @@ -1823,7 +1828,7 @@ async def test_temperature_unit( @pytest.mark.parametrize( - ("hass_config", "temperature_unit", "initial", "min", "max", "current"), + ("hass_config", "units", "initial", "min", "max", "current"), [ ( help_custom_config( @@ -1836,7 +1841,7 @@ async def test_temperature_unit( }, ), ), - UnitOfTemperature.CELSIUS, + METRIC_SYSTEM, DEFAULT_INITIAL_TEMPERATURE, DEFAULT_MIN_TEMP, DEFAULT_MAX_TEMP, @@ -1854,7 +1859,7 @@ async def test_temperature_unit( }, ), ), - UnitOfTemperature.CELSIUS, + METRIC_SYSTEM, 20.5, DEFAULT_MIN_TEMP, DEFAULT_MAX_TEMP, @@ -1871,24 +1876,7 @@ async def test_temperature_unit( }, ), ), - UnitOfTemperature.KELVIN, - 294, - 280, - 308, - 298, - ), - ( - help_custom_config( - climate.DOMAIN, - DEFAULT_CONFIG, - ( - { - "temperature_unit": "F", - "current_temperature_topic": "current_temperature", - }, - ), - ), - UnitOfTemperature.FAHRENHEIT, + US_CUSTOMARY_SYSTEM, 70, 45, 95, @@ -1899,25 +1887,25 @@ async def test_temperature_unit( async def test_alt_temperature_unit( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, - temperature_unit: UnitOfTemperature, + units: UnitSystem, initial: float, min: float, max: float, current: float, ) -> None: """Test deriving the systems temperature unit.""" - with patch.object(hass.config.units, "temperature_unit", temperature_unit): - await mqtt_mock_entry() + hass.config.units = units + await mqtt_mock_entry() - state = 
hass.states.get(ENTITY_CLIMATE) - assert state.attributes.get("temperature") == initial - assert state.attributes.get("min_temp") == min - assert state.attributes.get("max_temp") == max + state = hass.states.get(ENTITY_CLIMATE) + assert state.attributes.get("temperature") == initial + assert state.attributes.get("min_temp") == min + assert state.attributes.get("max_temp") == max - async_fire_mqtt_message(hass, "current_temperature", "77") + async_fire_mqtt_message(hass, "current_temperature", "77") - state = hass.states.get(ENTITY_CLIMATE) - assert state.attributes.get("current_temperature") == current + state = hass.states.get(ENTITY_CLIMATE) + assert state.attributes.get("current_temperature") == current async def test_setting_attribute_via_mqtt_json_message( diff --git a/tests/components/mqtt/test_config_flow.py b/tests/components/mqtt/test_config_flow.py index f39e32a0d8b..2635263ae8e 100644 --- a/tests/components/mqtt/test_config_flow.py +++ b/tests/components/mqtt/test_config_flow.py @@ -2,6 +2,7 @@ from collections.abc import Generator, Iterator from contextlib import contextmanager +from copy import deepcopy from pathlib import Path from ssl import SSLError from typing import Any @@ -17,6 +18,8 @@ from homeassistant import config_entries from homeassistant.components import mqtt from homeassistant.components.hassio import AddonError from homeassistant.components.mqtt.config_flow import PWD_NOT_CHANGED +from homeassistant.components.mqtt.util import learn_more_url +from homeassistant.config_entries import ConfigSubentry, ConfigSubentryData from homeassistant.const import ( CONF_CLIENT_ID, CONF_PASSWORD, @@ -26,8 +29,19 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.service_info.hassio import HassioServiceInfo +from .common import ( + MOCK_NOTIFY_SUBENTRY_DATA_MULTI, + MOCK_NOTIFY_SUBENTRY_DATA_NO_NAME, + MOCK_NOTIFY_SUBENTRY_DATA_SINGLE, + MOCK_SENSOR_SUBENTRY_DATA_SINGLE, + MOCK_SENSOR_SUBENTRY_DATA_SINGLE_LAST_RESET_TEMPLATE, + MOCK_SENSOR_SUBENTRY_DATA_SINGLE_STATE_CLASS, + MOCK_SWITCH_SUBENTRY_DATA_SINGLE_STATE_CLASS, +) + from tests.common import MockConfigEntry, MockMqttReasonCode from tests.typing import MqttMockHAClientGenerator, MqttMockPahoClient @@ -2598,3 +2612,1190 @@ async def test_migrate_of_incompatible_config_entry( await mqtt_mock_entry() assert config_entry.state is config_entries.ConfigEntryState.MIGRATION_ERROR + + +@pytest.mark.parametrize( + ( + "config_subentries_data", + "mock_device_user_input", + "mock_entity_user_input", + "mock_entity_details_user_input", + "mock_entity_details_failed_user_input", + "mock_mqtt_user_input", + "mock_failed_mqtt_user_input", + "entity_name", + ), + [ + ( + MOCK_NOTIFY_SUBENTRY_DATA_SINGLE, + {"name": "Milk notifier", "mqtt_settings": {"qos": 1}}, + {"name": "Milkman alert"}, + None, + None, + { + "command_topic": "test-topic", + "command_template": "{{ value }}", + "retain": False, + }, + ( + ( + {"command_topic": "test-topic#invalid"}, + {"command_topic": "invalid_publish_topic"}, + ), + ), + "Milk notifier Milkman alert", + ), + ( + MOCK_NOTIFY_SUBENTRY_DATA_NO_NAME, + {"name": "Milk notifier", "mqtt_settings": {"qos": 0}}, + {}, + None, + None, + { + "command_topic": "test-topic", + "command_template": "{{ value }}", + "retain": False, + }, + ( + ( + {"command_topic": "test-topic#invalid"}, + {"command_topic": 
"invalid_publish_topic"}, + ), + ), + "Milk notifier", + ), + ( + MOCK_SENSOR_SUBENTRY_DATA_SINGLE, + {"name": "Test sensor", "mqtt_settings": {"qos": 0}}, + {"name": "Energy"}, + {"device_class": "enum", "options": ["low", "medium", "high"]}, + ( + ( + { + "device_class": "energy", + "unit_of_measurement": "ppm", + }, + {"unit_of_measurement": "invalid_uom"}, + ), + # Trigger options to be shown on the form + ( + {"device_class": "enum"}, + {"options": "options_with_enum_device_class"}, + ), + # Test options are only allowed with device_class enum + ( + { + "device_class": "energy", + "options": ["less", "more"], + }, + { + "device_class": "options_device_class_enum", + "unit_of_measurement": "uom_required_for_device_class", + }, + ), + # Include options again to allow flow with valid data + ( + {"device_class": "enum"}, + {"options": "options_with_enum_device_class"}, + ), + ( + { + "device_class": "enum", + "state_class": "measurement", + "options": ["less", "more"], + }, + {"options": "options_not_allowed_with_state_class_or_uom"}, + ), + ), + { + "state_topic": "test-topic", + "value_template": "{{ value_json.value }}", + "advanced_settings": {"expire_after": 30}, + }, + ( + ( + {"state_topic": "test-topic#invalid"}, + {"state_topic": "invalid_subscribe_topic"}, + ), + ), + "Test sensor Energy", + ), + ( + MOCK_SENSOR_SUBENTRY_DATA_SINGLE_STATE_CLASS, + {"name": "Test sensor", "mqtt_settings": {"qos": 0}}, + {"name": "Energy"}, + { + "state_class": "measurement", + }, + (), + { + "state_topic": "test-topic", + }, + (), + "Test sensor Energy", + ), + ( + MOCK_SWITCH_SUBENTRY_DATA_SINGLE_STATE_CLASS, + {"name": "Test switch", "mqtt_settings": {"qos": 0}}, + {"name": "Outlet"}, + {"device_class": "outlet"}, + (), + { + "command_topic": "test-topic", + "command_template": "{{ value }}", + "state_topic": "test-topic", + "value_template": "{{ value_json.value }}", + "optimistic": True, + }, + ( + ( + {"command_topic": "test-topic#invalid"}, + {"command_topic": "invalid_publish_topic"}, + ), + ( + { + "command_topic": "test-topic", + "state_topic": "test-topic#invalid", + }, + {"state_topic": "invalid_subscribe_topic"}, + ), + ), + "Test switch Outlet", + ), + ], + ids=[ + "notify_with_entity_name", + "notify_no_entity_name", + "sensor_options", + "sensor_total", + "switch", + ], +) +async def test_subentry_configflow( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + config_subentries_data: dict[str, Any], + mock_device_user_input: dict[str, Any], + mock_entity_user_input: dict[str, Any], + mock_entity_details_user_input: dict[str, Any], + mock_entity_details_failed_user_input: tuple[ + tuple[dict[str, Any], dict[str, str]], + ], + mock_mqtt_user_input: dict[str, Any], + mock_failed_mqtt_user_input: tuple[tuple[dict[str, Any], dict[str, str]],], + entity_name: str, +) -> None: + """Test the subentry ConfigFlow.""" + device_name = mock_device_user_input["name"] + component = next(iter(config_subentries_data["components"].values())) + + await mqtt_mock_entry() + config_entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + + result = await hass.config_entries.subentries.async_init( + (config_entry.entry_id, "device"), + context={"source": config_entries.SOURCE_USER}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "device" + + # Test the URL validation + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input={ + "name": device_name, + "configuration_url": "http:/badurl.example.com", + }, + ) + 
assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "device" + assert result["errors"]["configuration_url"] == "invalid_url" + + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input=mock_device_user_input, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "entity" + assert result["errors"] == {} + + # Process entity flow (initial step) + + # Test the entity picture URL validation + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input={ + "platform": component["platform"], + "entity_picture": "invalid url", + } + | mock_entity_user_input, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "entity" + + # Try again with valid data + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input={ + "platform": component["platform"], + "entity_picture": component["entity_picture"], + } + | mock_entity_user_input, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + assert result["description_placeholders"] == { + "mqtt_device": device_name, + "platform": component["platform"], + "entity": entity_name, + "url": learn_more_url(component["platform"]), + } + + # Process extra step if the platform supports it + if mock_entity_details_user_input is not None: + # Extra entity details flow step + assert result["step_id"] == "entity_platform_config" + + # First test validators if set of test + for failed_user_input, failed_errors in mock_entity_details_failed_user_input: + # Test an invalid entity details user input case + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input=failed_user_input, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == failed_errors + + # Now try again with valid data + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input=mock_entity_details_user_input, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + assert result["description_placeholders"] == { + "mqtt_device": device_name, + "platform": component["platform"], + "entity": entity_name, + "url": learn_more_url(component["platform"]), + } + else: + # No details form step + assert result["step_id"] == "mqtt_platform_config" + + # Process mqtt platform config flow + # Test an invalid mqtt user input case + for failed_user_input, failed_errors in mock_failed_mqtt_user_input: + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input=failed_user_input, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == failed_errors + + # Try again with a valid configuration + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], user_input=mock_mqtt_user_input + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == device_name + + subentry_component = next( + iter(next(iter(config_entry.subentries.values())).data["components"].values()) + ) + assert subentry_component == next( + iter(config_subentries_data["components"].values()) + ) + + await hass.async_block_till_done() + + +@pytest.mark.parametrize( + "mqtt_config_subentries_data", + [ + ( + ConfigSubentryData( + data=MOCK_NOTIFY_SUBENTRY_DATA_MULTI, + subentry_type="device", + title="Mock subentry", + ), + ) + ], + ids=["notify"], +) +async def 
test_subentry_reconfigure_remove_entity( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test the subentry ConfigFlow reconfigure removing an entity.""" + await mqtt_mock_entry() + config_entry: MockConfigEntry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + subentry_id: str + subentry: ConfigSubentry + subentry_id, subentry = next(iter(config_entry.subentries.items())) + result = await config_entry.start_subentry_reconfigure_flow( + hass, "device", subentry_id + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "summary_menu" + + # assert we have a device for the subentry + device = device_registry.async_get_device(identifiers={(mqtt.DOMAIN, subentry_id)}) + assert device is not None + + # assert we have an entity for all subentry components + components = deepcopy(dict(subentry.data))["components"] + assert len(components) == 2 + object_list = list(components) + component_list = list(components.values()) + entity_name_0 = ( + f"{device.name} {component_list[0]['name']} ({component_list[0]['platform']})" + ) + entity_name_1 = ( + f"{device.name} {component_list[1]['name']} ({component_list[1]['platform']})" + ) + + for key, component in components.items(): + unique_entity_id = f"{subentry_id}_{key}" + entity_id = entity_registry.async_get_entity_id( + domain=component["platform"], + platform=mqtt.DOMAIN, + unique_id=unique_entity_id, + ) + assert entity_id is not None + entity_entry = entity_registry.async_get(entity_id) + assert entity_entry is not None + assert entity_entry.config_subentry_id == subentry_id + + # assert menu options, we have the option to delete one entity + # we have no option to save and finish yet + assert result["menu_options"] == [ + "entity", + "update_entity", + "delete_entity", + "device", + "availability", + ] + + # assert we can delete an entity + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + {"next_step_id": "delete_entity"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "delete_entity" + assert result["data_schema"].schema["component"].config["options"] == [ + {"value": object_list[0], "label": entity_name_0}, + {"value": object_list[1], "label": entity_name_1}, + ] + # remove notify_the_second_notifier + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input={ + "component": object_list[1], + }, + ) + + # assert menu options, we have only one item left, we cannot delete it + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "summary_menu" + assert result["menu_options"] == [ + "entity", + "update_entity", + "device", + "availability", + "save_changes", + ] + + # finish reconfigure flow + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + {"next_step_id": "save_changes"}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + # check if the second entity was removed from the subentry and entity registry + unique_entity_id = f"{subentry_id}_{object_list[1]}" + entity_id = entity_registry.async_get_entity_id( + domain=components[object_list[1]]["platform"], + platform=mqtt.DOMAIN, + unique_id=unique_entity_id, + ) + assert entity_id is None + new_components = deepcopy(dict(subentry.data))["components"] + assert object_list[0] in new_components + assert object_list[1] not in 
new_components + + +@pytest.mark.parametrize( + ("mqtt_config_subentries_data", "user_input_mqtt"), + [ + ( + ( + ConfigSubentryData( + data=MOCK_NOTIFY_SUBENTRY_DATA_MULTI, + subentry_type="device", + title="Mock subentry", + ), + ), + {"command_topic": "test-topic2-updated"}, + ) + ], + ids=["notify"], +) +async def test_subentry_reconfigure_edit_entity_multi_entitites( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + user_input_mqtt: dict[str, Any], +) -> None: + """Test the subentry ConfigFlow reconfigure with multi entities.""" + await mqtt_mock_entry() + config_entry: MockConfigEntry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + subentry_id: str + subentry: ConfigSubentry + subentry_id, subentry = next(iter(config_entry.subentries.items())) + result = await config_entry.start_subentry_reconfigure_flow( + hass, "device", subentry_id + ) + + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "summary_menu" + + # assert we have a device for the subentry + device = device_registry.async_get_device(identifiers={(mqtt.DOMAIN, subentry_id)}) + assert device is not None + + # assert we have an entity for all subentry components + components = deepcopy(dict(subentry.data))["components"] + assert len(components) == 2 + object_list = list(components) + component_list = list(components.values()) + entity_name_0 = ( + f"{device.name} {component_list[0]['name']} ({component_list[0]['platform']})" + ) + entity_name_1 = ( + f"{device.name} {component_list[1]['name']} ({component_list[1]['platform']})" + ) + + for key in components: + unique_entity_id = f"{subentry_id}_{key}" + entity_id = entity_registry.async_get_entity_id( + domain="notify", platform=mqtt.DOMAIN, unique_id=unique_entity_id + ) + assert entity_id is not None + entity_entry = entity_registry.async_get(entity_id) + assert entity_entry is not None + assert entity_entry.config_subentry_id == subentry_id + + # assert menu options, we have the option to delete one entity + # we have no option to save and finish yet + assert result["menu_options"] == [ + "entity", + "update_entity", + "delete_entity", + "device", + "availability", + ] + + # assert we can update an entity + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + {"next_step_id": "update_entity"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "update_entity" + assert result["data_schema"].schema["component"].config["options"] == [ + {"value": object_list[0], "label": entity_name_0}, + {"value": object_list[1], "label": entity_name_1}, + ] + # select second entity + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input={ + "component": object_list[1], + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "entity" + + # submit the common entity data with changed entity_picture + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input={ + "entity_picture": "https://example.com", + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "mqtt_platform_config" + + # submit the new platform specific entity data + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input=user_input_mqtt, + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "summary_menu" + + # finish reconfigure 
flow + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + {"next_step_id": "save_changes"}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + # Check we still have our components + new_components = deepcopy(dict(subentry.data))["components"] + + # Check the second component was updated + assert new_components[object_list[0]] == components[object_list[0]] + for key, value in user_input_mqtt.items(): + assert new_components[object_list[1]][key] == value + + +@pytest.mark.parametrize( + ( + "mqtt_config_subentries_data", + "user_input_platform_config_validation", + "user_input_platform_config", + "user_input_mqtt", + "removed_options", + ), + [ + ( + ( + ConfigSubentryData( + data=MOCK_NOTIFY_SUBENTRY_DATA_SINGLE, + subentry_type="device", + title="Mock subentry", + ), + ), + (), + None, + { + "command_topic": "test-topic1-updated", + "command_template": "{{ value }}", + "retain": True, + }, + {"entity_picture"}, + ), + ( + ( + ConfigSubentryData( + data=MOCK_SENSOR_SUBENTRY_DATA_SINGLE, + subentry_type="device", + title="Mock subentry", + ), + ), + ( + ( + { + "device_class": "battery", + "options": [], + "state_class": "measurement", + "unit_of_measurement": "invalid", + }, + # Allow to accept options are being removed + { + "device_class": "options_device_class_enum", + "options": "options_not_allowed_with_state_class_or_uom", + "unit_of_measurement": "invalid_uom", + }, + ), + ), + { + "device_class": "battery", + "state_class": "measurement", + "unit_of_measurement": "%", + "advanced_settings": {"suggested_display_precision": 1}, + }, + { + "state_topic": "test-topic1-updated", + "value_template": "{{ value_json.value }}", + }, + {"options", "expire_after", "entity_picture"}, + ), + ], + ids=["notify", "sensor"], +) +async def test_subentry_reconfigure_edit_entity_single_entity( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + user_input_platform_config_validation: tuple[ + tuple[dict[str, Any], dict[str, str] | None], ... 
+ ] + | None, + user_input_platform_config: dict[str, Any] | None, + user_input_mqtt: dict[str, Any], + removed_options: tuple[str, ...], +) -> None: + """Test the subentry ConfigFlow reconfigure with single entity.""" + await mqtt_mock_entry() + config_entry: MockConfigEntry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + subentry_id: str + subentry: ConfigSubentry + subentry_id, subentry = next(iter(config_entry.subentries.items())) + result = await config_entry.start_subentry_reconfigure_flow( + hass, "device", subentry_id + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "summary_menu" + + # assert we have a device for the subentry + device = device_registry.async_get_device(identifiers={(mqtt.DOMAIN, subentry_id)}) + assert device is not None + + # assert we have an entity for the subentry component + # Check we have "notify_milkman_alert" in our mock data + components = deepcopy(dict(subentry.data))["components"] + assert len(components) == 1 + + component_id, component = next(iter(components.items())) + + unique_entity_id = f"{subentry_id}_{component_id}" + entity_id = entity_registry.async_get_entity_id( + domain=component["platform"], platform=mqtt.DOMAIN, unique_id=unique_entity_id + ) + assert entity_id is not None + entity_entry = entity_registry.async_get(entity_id) + assert entity_entry is not None + assert entity_entry.config_subentry_id == subentry_id + + # assert menu options, we do not have the option to delete an entity + # we have no option to save and finish yet + assert result["menu_options"] == [ + "entity", + "update_entity", + "device", + "availability", + ] + + # assert we can update the entity, there is no select step + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + {"next_step_id": "update_entity"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "entity" + + # submit the new common entity data, reset entity_picture + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input={}, + ) + assert result["type"] is FlowResultType.FORM + + if user_input_platform_config is None: + # Skip entity flow step + assert result["step_id"] == "mqtt_platform_config" + else: + # Additional entity flow step + assert result["step_id"] == "entity_platform_config" + for entity_validation_config, errors in user_input_platform_config_validation: + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input=entity_validation_config, + ) + assert result["step_id"] == "entity_platform_config" + assert result.get("errors") == errors + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input=user_input_platform_config, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "mqtt_platform_config" + + # submit the new platform specific entity data, + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input=user_input_mqtt, + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "summary_menu" + + # finish reconfigure flow + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + {"next_step_id": "save_changes"}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + # Check we still have out components + new_components = 
deepcopy(dict(subentry.data))["components"] + assert len(new_components) == 1 + + # Check our update was successful + assert "entity_picture" not in new_components[component_id] + + # Check the second component was updated + for key, value in user_input_mqtt.items(): + assert new_components[component_id][key] == value + + assert set(component) - set(new_components[component_id]) == removed_options + + +@pytest.mark.parametrize( + ( + "mqtt_config_subentries_data", + "user_input_entity_details", + "user_input_mqtt", + "filtered_out_fields", + ), + [ + ( + ( + ConfigSubentryData( + data=MOCK_SENSOR_SUBENTRY_DATA_SINGLE_LAST_RESET_TEMPLATE, + subentry_type="device", + title="Mock subentry", + ), + ), + { + "state_class": "measurement", + }, + { + "state_topic": "test-topic", + }, + ("last_reset_value_template",), + ), + ], + ids=["sensor_last_reset_template"], +) +async def test_subentry_reconfigure_edit_entity_reset_fields( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + user_input_entity_details: dict[str, Any], + user_input_mqtt: dict[str, Any], + filtered_out_fields: tuple[str, ...], +) -> None: + """Test the subentry ConfigFlow reconfigure resets filtered out fields.""" + await mqtt_mock_entry() + config_entry: MockConfigEntry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + subentry_id: str + subentry: ConfigSubentry + subentry_id, subentry = next(iter(config_entry.subentries.items())) + result = await config_entry.start_subentry_reconfigure_flow( + hass, "device", subentry_id + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "summary_menu" + + # assert we have a device for the subentry + device = device_registry.async_get_device(identifiers={(mqtt.DOMAIN, subentry_id)}) + assert device is not None + + # assert we have an entity for the subentry component + components = deepcopy(dict(subentry.data))["components"] + assert len(components) == 1 + + component_id, component = next(iter(components.items())) + for field in filtered_out_fields: + assert field in component + + unique_entity_id = f"{subentry_id}_{component_id}" + entity_id = entity_registry.async_get_entity_id( + domain=component["platform"], platform=mqtt.DOMAIN, unique_id=unique_entity_id + ) + assert entity_id is not None + entity_entry = entity_registry.async_get(entity_id) + assert entity_entry is not None + assert entity_entry.config_subentry_id == subentry_id + + # assert menu options, we do not have the option to delete an entity + # we have no option to save and finish yet + assert result["menu_options"] == [ + "entity", + "update_entity", + "device", + "availability", + ] + + # assert we can update the entity, there is no select step + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + {"next_step_id": "update_entity"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "entity" + + # submit the new common entity data, reset entity_picture + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input={}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "entity_platform_config" + + # submit the new entity platform config + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input=user_input_entity_details, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "mqtt_platform_config" + + # 
submit the new platform specific mqtt data, + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input=user_input_mqtt, + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "summary_menu" + + # finish reconfigure flow + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + {"next_step_id": "save_changes"}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + # Check we still have out components + new_components = deepcopy(dict(subentry.data))["components"] + assert len(new_components) == 1 + + # Check our update was successful + assert "entity_picture" not in new_components[component_id] + + # Check the second component was updated + for key, value in user_input_mqtt.items(): + assert new_components[component_id][key] == value + + # Check field are filtered out correctly + for field in filtered_out_fields: + assert field not in new_components[component_id] + + +@pytest.mark.parametrize( + ("mqtt_config_subentries_data", "user_input_entity", "user_input_mqtt"), + [ + ( + ( + ConfigSubentryData( + data=MOCK_NOTIFY_SUBENTRY_DATA_SINGLE, + subentry_type="device", + title="Mock subentry", + ), + ), + { + "platform": "notify", + "name": "The second notifier", + "entity_picture": "https://example.com", + }, + { + "command_topic": "test-topic2", + }, + ) + ], + ids=["notify_notify"], +) +async def test_subentry_reconfigure_add_entity( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + user_input_entity: dict[str, Any], + user_input_mqtt: dict[str, Any], +) -> None: + """Test the subentry ConfigFlow reconfigure and add an entity.""" + await mqtt_mock_entry() + config_entry: MockConfigEntry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + subentry_id: str + subentry: ConfigSubentry + subentry_id, subentry = next(iter(config_entry.subentries.items())) + result = await config_entry.start_subentry_reconfigure_flow( + hass, "device", subentry_id + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "summary_menu" + + # assert we have a device for the subentry + device = device_registry.async_get_device(identifiers={(mqtt.DOMAIN, subentry_id)}) + assert device is not None + + # assert we have an entity for the subentry component + components = deepcopy(dict(subentry.data))["components"] + assert len(components) == 1 + component_id_1, component1 = next(iter(components.items())) + unique_entity_id = f"{subentry_id}_{component_id_1}" + entity_id = entity_registry.async_get_entity_id( + domain=component1["platform"], platform=mqtt.DOMAIN, unique_id=unique_entity_id + ) + assert entity_id is not None + entity_entry = entity_registry.async_get(entity_id) + assert entity_entry is not None + assert entity_entry.config_subentry_id == subentry_id + + # assert menu options, we do not have the option to delete an entity + # we have no option to save and finish yet + assert result["menu_options"] == [ + "entity", + "update_entity", + "device", + "availability", + ] + + # assert we can update the entity, there is no select step + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + {"next_step_id": "entity"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "entity" + + # submit the new common entity data + result = await hass.config_entries.subentries.async_configure( + 
result["flow_id"], + user_input=user_input_entity, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "mqtt_platform_config" + + # submit the new platform specific entity data + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input=user_input_mqtt, + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "summary_menu" + + # Finish reconfigure flow + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + {"next_step_id": "save_changes"}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + # Check we still have our components + new_components = deepcopy(dict(subentry.data))["components"] + assert len(new_components) == 2 + + component_id_2 = next(iter(set(new_components) - {component_id_1})) + + # Check our new entity was added correctly + expected_component_config = user_input_entity | user_input_mqtt + for key, value in expected_component_config.items(): + assert new_components[component_id_2][key] == value + + +@pytest.mark.parametrize( + "mqtt_config_subentries_data", + [ + ( + ConfigSubentryData( + data=MOCK_NOTIFY_SUBENTRY_DATA_MULTI, + subentry_type="device", + title="Mock subentry", + ), + ) + ], +) +async def test_subentry_reconfigure_update_device_properties( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + device_registry: dr.DeviceRegistry, +) -> None: + """Test the subentry ConfigFlow reconfigure and update device properties.""" + await mqtt_mock_entry() + config_entry: MockConfigEntry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + subentry_id: str + subentry: ConfigSubentry + subentry_id, subentry = next(iter(config_entry.subentries.items())) + result = await config_entry.start_subentry_reconfigure_flow( + hass, "device", subentry_id + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "summary_menu" + + # assert we have a device for the subentry + device = device_registry.async_get_device(identifiers={(mqtt.DOMAIN, subentry_id)}) + assert device is not None + + # assert we have an entity for all subentry components + components = deepcopy(dict(subentry.data))["components"] + assert len(components) == 2 + + # Assert initial data + device = deepcopy(dict(subentry.data))["device"] + assert device["name"] == "Milk notifier" + assert device["sw_version"] == "1.0" + assert device["hw_version"] == "2.1 rev a" + assert device["model"] == "Model XL" + assert device["model_id"] == "mn002" + + # assert menu options, we have the option to delete one entity + # we have no option to save and finish yet + assert result["menu_options"] == [ + "entity", + "update_entity", + "delete_entity", + "device", + "availability", + ] + + # assert we can update the device properties + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + {"next_step_id": "device"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "device" + + # Update the device details + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input={ + "name": "Beer notifier", + "sw_version": "1.1", + "model": "Beer bottle XL", + "model_id": "bn003", + "configuration_url": "https://example.com", + }, + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "summary_menu" + + # finish reconfigure flow + result = await hass.config_entries.subentries.async_configure( +
result["flow_id"], + {"next_step_id": "save_changes"}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + # Check our device was updated + device = deepcopy(dict(subentry.data))["device"] + assert device["name"] == "Beer notifier" + assert "hw_version" not in device + assert device["model"] == "Beer bottle XL" + assert device["model_id"] == "bn003" + + +@pytest.mark.parametrize( + "mqtt_config_subentries_data", + [ + ( + ConfigSubentryData( + data=MOCK_NOTIFY_SUBENTRY_DATA_MULTI, + subentry_type="device", + title="Mock subentry", + ), + ) + ], +) +async def test_subentry_reconfigure_availability( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, +) -> None: + """Test the subentry ConfigFlow reconfigure and update the availability configuration.""" + await mqtt_mock_entry() + config_entry: MockConfigEntry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + subentry_id: str + subentry: ConfigSubentry + subentry_id, subentry = next(iter(config_entry.subentries.items())) + + expected_availability = { + "availability_topic": "test/availability", + "availability_template": "{{ value_json.availability }}", + "payload_available": "online", + "payload_not_available": "offline", + } + assert subentry.data.get("availability") == expected_availability + + result = await config_entry.start_subentry_reconfigure_flow( + hass, "device", subentry_id + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "summary_menu" + + # assert we can set the availability config + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + {"next_step_id": "availability"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "availability" + + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input={ + "availability_topic": "test/new_availability#invalid_topic", + "payload_available": "1", + "payload_not_available": "0", + }, + ) + assert result["errors"] == {"availability_topic": "invalid_subscribe_topic"} + + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input={ + "availability_topic": "test/new_availability", + "payload_available": "1", + "payload_not_available": "0", + }, + ) + + # finish reconfigure flow + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + {"next_step_id": "save_changes"}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + # Check the availability was updated + expected_availability = { + "availability_topic": "test/new_availability", + "payload_available": "1", + "payload_not_available": "0", + } + assert subentry.data.get("availability") == expected_availability + + # Assert we can reset the availability config + result = await config_entry.start_subentry_reconfigure_flow( + hass, "device", subentry_id + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "summary_menu" + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + {"next_step_id": "availability"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "availability" + + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input={ + "payload_available": "1", + "payload_not_available": "0", + }, + ) + + # Finish reconfigure flow + result = await hass.config_entries.subentries.async_configure( +
result["flow_id"], + {"next_step_id": "save_changes"}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + # Check the availability was updated + assert subentry.data.get("availability") == { + "payload_available": "1", + "payload_not_available": "0", + } diff --git a/tests/components/mqtt/test_cover.py b/tests/components/mqtt/test_cover.py index ee74b78be81..81530758de7 100644 --- a/tests/components/mqtt/test_cover.py +++ b/tests/components/mqtt/test_cover.py @@ -37,6 +37,7 @@ from homeassistant.const import ( SERVICE_SET_COVER_POSITION, SERVICE_SET_COVER_TILT_POSITION, SERVICE_STOP_COVER, + SERVICE_STOP_COVER_TILT, SERVICE_TOGGLE, SERVICE_TOGGLE_COVER_TILT, STATE_CLOSED, @@ -45,7 +46,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, @@ -936,6 +937,63 @@ async def test_send_stop_cover_command( assert state.state == STATE_UNKNOWN +@pytest.mark.parametrize( + ("hass_config", "payload_stop"), + [ + ( + { + mqtt.DOMAIN: { + cover.DOMAIN: { + "name": "test", + "state_topic": "state-topic", + "tilt_command_topic": "tilt-command-topic", + "payload_stop_tilt": "TILT_STOP", + "qos": 2, + } + } + }, + "TILT_STOP", + ), + ( + { + mqtt.DOMAIN: { + cover.DOMAIN: { + "name": "test", + "state_topic": "state-topic", + "tilt_command_topic": "tilt-command-topic", + "qos": 2, + } + } + }, + "STOP", + ), + ], +) +async def test_send_stop_tilt_command( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + payload_stop: str, +) -> None: + """Test the sending of stop_cover_tilt.""" + mqtt_mock = await mqtt_mock_entry() + + state = hass.states.get("cover.test") + assert state.state == STATE_UNKNOWN + + await hass.services.async_call( + cover.DOMAIN, + SERVICE_STOP_COVER_TILT, + {ATTR_ENTITY_ID: "cover.test"}, + blocking=True, + ) + + mqtt_mock.async_publish.assert_called_once_with( + "tilt-command-topic", payload_stop, 2, False + ) + state = hass.states.get("cover.test") + assert state.state == STATE_UNKNOWN + + @pytest.mark.parametrize( "hass_config", [ diff --git a/tests/components/mqtt/test_device_tracker.py b/tests/components/mqtt/test_device_tracker.py index 00e88860299..02289c8e476 100644 --- a/tests/components/mqtt/test_device_tracker.py +++ b/tests/components/mqtt/test_device_tracker.py @@ -12,7 +12,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component -from .test_common import ( +from .common import ( help_custom_config, help_test_reloadable, help_test_setting_blocked_attribute_via_mqtt_json_message, diff --git a/tests/components/mqtt/test_device_trigger.py b/tests/components/mqtt/test_device_trigger.py index 5cdfb14a5cf..ecf922e54a1 100644 --- a/tests/components/mqtt/test_device_trigger.py +++ b/tests/components/mqtt/test_device_trigger.py @@ -16,7 +16,7 @@ from homeassistant.helpers import device_registry as dr from homeassistant.helpers.trigger import async_initialize_triggers from homeassistant.setup import async_setup_component -from .test_common import help_test_unload_config_entry +from .common import help_test_unload_config_entry from tests.common import async_fire_mqtt_message, async_get_device_automations from tests.typing import MqttMockHAClientGenerator, WebSocketGenerator diff --git 
a/tests/components/mqtt/test_discovery.py b/tests/components/mqtt/test_discovery.py index 47c3a1e1988..ee33cbcbaa1 100644 --- a/tests/components/mqtt/test_discovery.py +++ b/tests/components/mqtt/test_discovery.py @@ -46,8 +46,8 @@ from homeassistant.helpers.service_info.mqtt import MqttServiceInfo from homeassistant.setup import async_setup_component from homeassistant.util.signal_type import SignalTypeFormat +from .common import help_all_subscribe_calls, help_test_unload_config_entry from .conftest import ENTRY_DEFAULT_BIRTH_MESSAGE -from .test_common import help_all_subscribe_calls, help_test_unload_config_entry from .test_tag import DEFAULT_TAG_ID, DEFAULT_TAG_SCAN from tests.common import ( diff --git a/tests/components/mqtt/test_event.py b/tests/components/mqtt/test_event.py index 41049ed0887..a7f00a1d1a8 100644 --- a/tests/components/mqtt/test_event.py +++ b/tests/components/mqtt/test_event.py @@ -13,7 +13,7 @@ from homeassistant.const import STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_fan.py b/tests/components/mqtt/test_fan.py index 6c8afe8c1b4..36b5032e282 100644 --- a/tests/components/mqtt/test_fan.py +++ b/tests/components/mqtt/test_fan.py @@ -36,7 +36,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_humidifier.py b/tests/components/mqtt/test_humidifier.py index 20ca89181eb..435531182ed 100644 --- a/tests/components/mqtt/test_humidifier.py +++ b/tests/components/mqtt/test_humidifier.py @@ -36,7 +36,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_image.py b/tests/components/mqtt/test_image.py index 6f0eb8edf49..9b64a8836a0 100644 --- a/tests/components/mqtt/test_image.py +++ b/tests/components/mqtt/test_image.py @@ -14,7 +14,7 @@ from homeassistant.components import image, mqtt from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import HomeAssistant -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_lawn_mower.py b/tests/components/mqtt/test_lawn_mower.py index 0bef4196ef2..c58402c4f5c 100644 --- a/tests/components/mqtt/test_lawn_mower.py +++ b/tests/components/mqtt/test_lawn_mower.py @@ -19,7 +19,7 @@ from homeassistant.components.mqtt.lawn_mower import MQTT_LAWN_MOWER_ATTRIBUTES_ from homeassistant.const import ATTR_ASSUMED_STATE, ATTR_ENTITY_ID, STATE_UNKNOWN from homeassistant.core import HomeAssistant, State -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_light.py b/tests/components/mqtt/test_light.py index f8c66a3de1d..a8be259c1c9 100644 --- a/tests/components/mqtt/test_light.py +++ b/tests/components/mqtt/test_light.py @@ -210,7 +210,7 @@ 
from homeassistant.components.mqtt.models import PublishPayloadType from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON, STATE_UNKNOWN from homeassistant.core import HomeAssistant, State -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_light_json.py b/tests/components/mqtt/test_light_json.py index 67d382826ae..f3264858095 100644 --- a/tests/components/mqtt/test_light_json.py +++ b/tests/components/mqtt/test_light_json.py @@ -102,7 +102,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, State from homeassistant.util.json import json_loads -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_light_template.py b/tests/components/mqtt/test_light_template.py index 568d86f8bd9..b3a1c11c2b6 100644 --- a/tests/components/mqtt/test_light_template.py +++ b/tests/components/mqtt/test_light_template.py @@ -45,7 +45,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant, State -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_lock.py b/tests/components/mqtt/test_lock.py index 034f9b5ff6e..4aa6ecd03ef 100644 --- a/tests/components/mqtt/test_lock.py +++ b/tests/components/mqtt/test_lock.py @@ -23,7 +23,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_mixins.py b/tests/components/mqtt/test_mixins.py index d65f1a4d661..fa30283962b 100644 --- a/tests/components/mqtt/test_mixins.py +++ b/tests/components/mqtt/test_mixins.py @@ -1,18 +1,31 @@ """The tests for shared code of the MQTT platform.""" -from unittest.mock import patch +from typing import Any +from unittest.mock import call, patch import pytest from homeassistant.components import mqtt, sensor from homeassistant.components.mqtt.sensor import DEFAULT_NAME as DEFAULT_SENSOR_NAME +from homeassistant.config_entries import ConfigSubentryData from homeassistant.const import ( ATTR_FRIENDLY_NAME, EVENT_HOMEASSISTANT_STARTED, EVENT_STATE_CHANGED, ) from homeassistant.core import CoreState, HomeAssistant, callback -from homeassistant.helpers import device_registry as dr, issue_registry as ir +from homeassistant.helpers import ( + device_registry as dr, + entity_registry as er, + issue_registry as ir, +) +from homeassistant.util import slugify + +from .common import ( + MOCK_NOTIFY_SUBENTRY_DATA_SINGLE, + MOCK_SUBENTRY_DATA_BAD_COMPONENT_SCHEMA, + MOCK_SUBENTRY_DATA_SET_MIX, +) from tests.common import MockConfigEntry, async_capture_events, async_fire_mqtt_message from tests.typing import MqttMockHAClientGenerator @@ -453,3 +466,124 @@ async def test_value_template_fails( "TypeError: unsupported operand type(s) for *: 'NoneType' and 'int' rendering template" in caplog.text ) + + +@pytest.mark.parametrize( + "mqtt_config_subentries_data", + [ + ( + ConfigSubentryData( + data=MOCK_SUBENTRY_DATA_SET_MIX, + subentry_type="device", + title="Mock subentry", + ), + ) + ], +) +async def test_loading_subentries( + hass: HomeAssistant, + 
mqtt_mock_entry: MqttMockHAClientGenerator, + mqtt_config_subentries_data: tuple[dict[str, Any]], + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test loading subentries.""" + await mqtt_mock_entry() + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + subentry_id = next(iter(entry.subentries)) + # Each subentry has one device + device = device_registry.async_get_device({("mqtt", subentry_id)}) + assert device is not None + for object_id, component in mqtt_config_subentries_data[0]["data"][ + "components" + ].items(): + platform = component["platform"] + entity_id = f"{platform}.{slugify(device.name)}_{slugify(component['name'])}" + entity_entry_entity_id = entity_registry.async_get_entity_id( + platform, mqtt.DOMAIN, f"{subentry_id}_{object_id}" + ) + assert entity_entry_entity_id == entity_id + state = hass.states.get(entity_id) + assert state is not None + assert ( + state.attributes.get("entity_picture") == f"https://example.com/{object_id}" + ) + # Availability was configured, so entities are unavailable + assert state.state == "unavailable" + + # Make entities available + async_fire_mqtt_message(hass, "test/availability", '{"availability": "online"}') + for component in mqtt_config_subentries_data[0]["data"]["components"].values(): + platform = component["platform"] + entity_id = f"{platform}.{slugify(device.name)}_{slugify(component['name'])}" + state = hass.states.get(entity_id) + assert state is not None + assert state.state == "unknown" + + +@pytest.mark.parametrize( + "mqtt_config_subentries_data", + [ + ( + ConfigSubentryData( + data=MOCK_SUBENTRY_DATA_BAD_COMPONENT_SCHEMA, + subentry_type="device", + title="Mock subentry", + ), + ) + ], +) +async def test_loading_subentry_with_bad_component_schema( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + mqtt_config_subentries_data: tuple[dict[str, Any]], + device_registry: dr.DeviceRegistry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test loading a subentry with a bad component schema.""" + await mqtt_mock_entry() + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + subentry_id = next(iter(entry.subentries)) + # Each subentry has one device + device = device_registry.async_get_device({("mqtt", subentry_id)}) + assert device is None + assert ( + "Schema violation occurred when trying to set up entity from subentry" + in caplog.text + ) + + +@pytest.mark.parametrize( + "mqtt_config_subentries_data", + [ + ( + ConfigSubentryData( + data=MOCK_NOTIFY_SUBENTRY_DATA_SINGLE, + subentry_type="device", + title="Mock subentry", + ), + ) + ], +) +async def test_qos_on_mqtt_device_from_subentry( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + mqtt_config_subentries_data: tuple[dict[str, Any]], + device_registry: dr.DeviceRegistry, +) -> None: + """Test QoS is set correctly on entities from an MQTT device.""" + mqtt_mock = await mqtt_mock_entry() + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + subentry_id = next(iter(entry.subentries)) + # Each subentry has one device + device = device_registry.async_get_device({("mqtt", subentry_id)}) + assert device is not None + assert hass.states.get("notify.milk_notifier_milkman_alert") is not None + await hass.services.async_call( + "notify", + "send_message", + {"entity_id": "notify.milk_notifier_milkman_alert", "message": "Test message"}, + ) + await hass.async_block_till_done() + assert len(mqtt_mock.async_publish.mock_calls) == 1 + assert mqtt_mock.async_publish.mock_calls[0] == call("test-topic", "Test message",
1, False) diff --git a/tests/components/mqtt/test_notify.py b/tests/components/mqtt/test_notify.py index 4837ee214c4..56da809d1b6 100644 --- a/tests/components/mqtt/test_notify.py +++ b/tests/components/mqtt/test_notify.py @@ -11,7 +11,7 @@ from homeassistant.components.notify import ATTR_MESSAGE from homeassistant.const import ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, STATE_UNKNOWN from homeassistant.core import HomeAssistant -from .test_common import ( +from .common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, diff --git a/tests/components/mqtt/test_number.py b/tests/components/mqtt/test_number.py index 7bdd39e81a7..f391236aca4 100644 --- a/tests/components/mqtt/test_number.py +++ b/tests/components/mqtt/test_number.py @@ -31,7 +31,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, State from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_scene.py b/tests/components/mqtt/test_scene.py index d78dbe5c003..1650fe74601 100644 --- a/tests/components/mqtt/test_scene.py +++ b/tests/components/mqtt/test_scene.py @@ -10,7 +10,7 @@ from homeassistant.components import mqtt, scene from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_ON, STATE_UNKNOWN from homeassistant.core import HomeAssistant, State -from .test_common import ( +from .common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, diff --git a/tests/components/mqtt/test_select.py b/tests/components/mqtt/test_select.py index 8d79a3ce609..a880368fa51 100644 --- a/tests/components/mqtt/test_select.py +++ b/tests/components/mqtt/test_select.py @@ -21,7 +21,7 @@ from homeassistant.const import ATTR_ASSUMED_STATE, ATTR_ENTITY_ID, STATE_UNKNOW from homeassistant.core import HomeAssistant, State from homeassistant.helpers.typing import ConfigType -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_sensor.py b/tests/components/mqtt/test_sensor.py index 1fcd70a0b10..74dc94de21e 100644 --- a/tests/components/mqtt/test_sensor.py +++ b/tests/components/mqtt/test_sensor.py @@ -25,7 +25,7 @@ from homeassistant.helpers import device_registry as dr, issue_registry as ir from homeassistant.helpers.typing import ConfigType from homeassistant.util import dt as dt_util -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_siren.py b/tests/components/mqtt/test_siren.py index 58a5cb735f9..5d82708e242 100644 --- a/tests/components/mqtt/test_siren.py +++ b/tests/components/mqtt/test_siren.py @@ -20,7 +20,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_switch.py b/tests/components/mqtt/test_switch.py index dceeff07377..d834595afe0 100644 --- a/tests/components/mqtt/test_switch.py +++ b/tests/components/mqtt/test_switch.py @@ -16,7 +16,7 @@ from 
homeassistant.const import ( ) from homeassistant.core import HomeAssistant, State -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_tag.py b/tests/components/mqtt/test_tag.py index 41c417fe3e9..95326382dcc 100644 --- a/tests/components/mqtt/test_tag.py +++ b/tests/components/mqtt/test_tag.py @@ -13,7 +13,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component -from .test_common import help_test_unload_config_entry +from .common import help_test_unload_config_entry from tests.common import ( MockConfigEntry, diff --git a/tests/components/mqtt/test_text.py b/tests/components/mqtt/test_text.py index 96924030279..050b2b59590 100644 --- a/tests/components/mqtt/test_text.py +++ b/tests/components/mqtt/test_text.py @@ -11,7 +11,7 @@ from homeassistant.components import mqtt, text from homeassistant.const import ATTR_ASSUMED_STATE, ATTR_ENTITY_ID, STATE_UNKNOWN from homeassistant.core import HomeAssistant -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_update.py b/tests/components/mqtt/test_update.py index 4ca10cbe8b2..d70d7dd792b 100644 --- a/tests/components/mqtt/test_update.py +++ b/tests/components/mqtt/test_update.py @@ -10,7 +10,7 @@ from homeassistant.components.update import DOMAIN as UPDATE_DOMAIN, SERVICE_INS from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, STATE_UNKNOWN from homeassistant.core import HomeAssistant -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_vacuum.py b/tests/components/mqtt/test_vacuum.py index c1c662048d7..ba404e2dff0 100644 --- a/tests/components/mqtt/test_vacuum.py +++ b/tests/components/mqtt/test_vacuum.py @@ -33,7 +33,7 @@ from homeassistant.const import CONF_NAME, ENTITY_MATCH_ALL, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_valve.py b/tests/components/mqtt/test_valve.py index 6dd0102b8a3..10387a5b19e 100644 --- a/tests/components/mqtt/test_valve.py +++ b/tests/components/mqtt/test_valve.py @@ -27,7 +27,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_water_heater.py b/tests/components/mqtt/test_water_heater.py index 02ae54c1a85..21969ad7788 100644 --- a/tests/components/mqtt/test_water_heater.py +++ b/tests/components/mqtt/test_water_heater.py @@ -33,8 +33,13 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.util.unit_conversion import TemperatureConverter +from homeassistant.util.unit_system import ( + METRIC_SYSTEM, + US_CUSTOMARY_SYSTEM, + UnitSystem, +) -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, 
help_test_availability_without_topic, @@ -714,7 +719,7 @@ async def test_temperature_unit( @pytest.mark.parametrize( - ("hass_config", "temperature_unit", "initial", "min_temp", "max_temp", "current"), + ("hass_config", "units", "initial", "min_temp", "max_temp", "current"), [ ( help_custom_config( @@ -727,7 +732,7 @@ async def test_temperature_unit( }, ), ), - UnitOfTemperature.CELSIUS, + METRIC_SYSTEM, _DEFAULT_MIN_TEMP_CELSIUS, _DEFAULT_MIN_TEMP_CELSIUS, _DEFAULT_MAX_TEMP_CELSIUS, @@ -744,24 +749,7 @@ async def test_temperature_unit( }, ), ), - UnitOfTemperature.KELVIN, - 316, - 316, - 333, - 322, - ), - ( - help_custom_config( - water_heater.DOMAIN, - DEFAULT_CONFIG, - ( - { - "temperature_unit": "F", - "current_temperature_topic": "current_temperature", - }, - ), - ), - UnitOfTemperature.FAHRENHEIT, + US_CUSTOMARY_SYSTEM, DEFAULT_MIN_TEMP, DEFAULT_MIN_TEMP, DEFAULT_MAX_TEMP, @@ -772,25 +760,25 @@ async def test_temperature_unit( async def test_alt_temperature_unit( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, - temperature_unit: UnitOfTemperature, + units: UnitSystem, initial: float, min_temp: float, max_temp: float, current: float, ) -> None: """Test deriving the systems temperature unit.""" - with patch.object(hass.config.units, "temperature_unit", temperature_unit): - await mqtt_mock_entry() + hass.config.units = units + await mqtt_mock_entry() - state = hass.states.get(ENTITY_WATER_HEATER) - assert state.attributes.get("temperature") == initial - assert state.attributes.get("min_temp") == min_temp - assert state.attributes.get("max_temp") == max_temp + state = hass.states.get(ENTITY_WATER_HEATER) + assert state.attributes.get("temperature") == initial + assert state.attributes.get("min_temp") == min_temp + assert state.attributes.get("max_temp") == max_temp - async_fire_mqtt_message(hass, "current_temperature", "120") + async_fire_mqtt_message(hass, "current_temperature", "120") - state = hass.states.get(ENTITY_WATER_HEATER) - assert state.attributes.get("current_temperature") == current + state = hass.states.get(ENTITY_WATER_HEATER) + assert state.attributes.get("current_temperature") == current async def test_setting_attribute_via_mqtt_json_message( diff --git a/tests/components/nam/fixtures/nam_data.json b/tests/components/nam/fixtures/nam_data.json index 82dacbefb34..47ebe099dc7 100644 --- a/tests/components/nam/fixtures/nam_data.json +++ b/tests/components/nam/fixtures/nam_data.json @@ -26,6 +26,7 @@ { "value_type": "temperature", "value": "6.26" }, { "value_type": "HECA_temperature", "value": "7.95" }, { "value_type": "HECA_humidity", "value": "49.97" }, + { "value_type": "ambient_light", "value": "298.45" }, { "value_type": "signal", "value": "-72" } ] } diff --git a/tests/components/nam/snapshots/test_diagnostics.ambr b/tests/components/nam/snapshots/test_diagnostics.ambr index e92e02fa1d8..c0009899d16 100644 --- a/tests/components/nam/snapshots/test_diagnostics.ambr +++ b/tests/components/nam/snapshots/test_diagnostics.ambr @@ -2,6 +2,7 @@ # name: test_entry_diagnostics dict({ 'data': dict({ + 'bh1750_illuminance': 298.45, 'bme280_humidity': 45.69, 'bme280_pressure': 1011.0117, 'bme280_temperature': 7.56, diff --git a/tests/components/nam/snapshots/test_sensor.ambr b/tests/components/nam/snapshots/test_sensor.ambr index 429d069b741..c6c32737a31 100644 --- a/tests/components/nam/snapshots/test_sensor.ambr +++ b/tests/components/nam/snapshots/test_sensor.ambr @@ -1,4 +1,59 @@ # serializer version: 1 +# name: 
test_sensor[sensor.nettigo_air_monitor_bh1750_illuminance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_bh1750_illuminance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'BH1750 illuminance', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'bh1750_illuminance', + 'unique_id': 'aa:bb:cc:dd:ee:ff-bh1750_illuminance', + 'unit_of_measurement': 'lx', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_bh1750_illuminance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'illuminance', + 'friendly_name': 'Nettigo Air Monitor BH1750 illuminance', + 'state_class': , + 'unit_of_measurement': 'lx', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_bh1750_illuminance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '298.45', + }) +# --- # name: test_sensor[sensor.nettigo_air_monitor_bme280_humidity-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/nam/test_button.py b/tests/components/nam/test_button.py index 39c37d57f89..b410665911a 100644 --- a/tests/components/nam/test_button.py +++ b/tests/components/nam/test_button.py @@ -2,9 +2,20 @@ from unittest.mock import patch -from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, ButtonDeviceClass +from aiohttp.client_exceptions import ClientError +from nettigo_air_monitor import ApiError, AuthFailedError +import pytest + +from homeassistant.components.button import ( + DOMAIN as BUTTON_DOMAIN, + SERVICE_PRESS, + ButtonDeviceClass, +) +from homeassistant.components.nam import DOMAIN +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState from homeassistant.const import ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, STATE_UNKNOWN from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er from homeassistant.util import dt as dt_util @@ -38,7 +49,7 @@ async def test_button_press(hass: HomeAssistant) -> None: ): await hass.services.async_call( BUTTON_DOMAIN, - "press", + SERVICE_PRESS, {ATTR_ENTITY_ID: "button.nettigo_air_monitor_restart"}, blocking=True, ) @@ -49,3 +60,55 @@ async def test_button_press(hass: HomeAssistant) -> None: state = hass.states.get("button.nettigo_air_monitor_restart") assert state assert state.state == now.isoformat() + + +@pytest.mark.parametrize(("exc"), [ApiError("API Error"), ClientError]) +async def test_button_press_exc(hass: HomeAssistant, exc: Exception) -> None: + """Test button press when exception occurs.""" + await init_integration(hass) + + with ( + patch( + "homeassistant.components.nam.NettigoAirMonitor.async_restart", + side_effect=exc, + ), + pytest.raises( + HomeAssistantError, + match="An error occurred while calling action for button.nettigo_air_monitor_restart", + ), + ): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.nettigo_air_monitor_restart"}, + blocking=True, + ) + + +async def 
test_button_press_auth_error(hass: HomeAssistant) -> None: + """Test button press when auth error occurs.""" + entry = await init_integration(hass) + + with patch( + "homeassistant.components.nam.NettigoAirMonitor.async_restart", + side_effect=AuthFailedError("auth error"), + ): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.nettigo_air_monitor_restart"}, + blocking=True, + ) + + assert entry.state is ConfigEntryState.LOADED + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + + flow = flows[0] + assert flow.get("step_id") == "reauth_confirm" + assert flow.get("handler") == DOMAIN + + assert "context" in flow + assert flow["context"].get("source") == SOURCE_REAUTH + assert flow["context"].get("entry_id") == entry.entry_id diff --git a/tests/components/nest/conftest.py b/tests/components/nest/conftest.py index 92d90a18a7e..b4b94efce5b 100644 --- a/tests/components/nest/conftest.py +++ b/tests/components/nest/conftest.py @@ -144,13 +144,14 @@ async def auth( return FakeAuth(aioclient_mock, create_device, device_access_project_id) -@pytest.fixture(autouse=True) -def cleanup_media_storage(hass: HomeAssistant) -> Generator[None]: +@pytest.fixture(autouse=True, name="media_path") +def cleanup_media_storage(hass: HomeAssistant) -> Generator[str]: """Test cleanup, remove any media storage persisted during the test.""" tmp_path = str(uuid.uuid4()) with patch("homeassistant.components.nest.media_source.MEDIA_PATH", new=tmp_path): - yield - shutil.rmtree(hass.config.path(tmp_path), ignore_errors=True) + full_path = hass.config.path(tmp_path) + yield full_path + shutil.rmtree(full_path, ignore_errors=True) @pytest.fixture diff --git a/tests/components/nest/test_media_source.py b/tests/components/nest/test_media_source.py index d009e1185da..0b0654fc69c 100644 --- a/tests/components/nest/test_media_source.py +++ b/tests/components/nest/test_media_source.py @@ -8,11 +8,13 @@ from collections.abc import Generator import datetime from http import HTTPStatus import io +import pathlib from typing import Any from unittest.mock import patch import aiohttp import av +from freezegun import freeze_time import numpy as np import pytest @@ -39,7 +41,7 @@ from .common import ( ) from .conftest import FakeAuth -from tests.common import MockUser, async_capture_events +from tests.common import MockUser, async_capture_events, async_fire_time_changed from tests.typing import ClientSessionGenerator DOMAIN = "nest" @@ -1574,3 +1576,80 @@ async def test_event_clip_media_attachment( response = await client.get(content_path) assert response.status == HTTPStatus.OK, f"Response not matched: {response}" await response.read() + + +@pytest.mark.parametrize(("device_traits", "cache_size"), [(BATTERY_CAMERA_TRAITS, 5)]) +async def test_remove_stale_media( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + auth, + mp4, + hass_client: ClientSessionGenerator, + subscriber, + setup_platform, + media_path: str, +) -> None: + """Test media files getting evicted from the cache.""" + await setup_platform() + + device = device_registry.async_get_device(identifiers={(DOMAIN, DEVICE_ID)}) + assert device + assert device.name == DEVICE_NAME + + # Publish a media event + auth.responses = [ + aiohttp.web.Response(body=mp4.getvalue()), + ] + event_timestamp = dt_util.now() + await subscriber.async_receive_event( + create_event_message( + create_battery_event_data(MOTION_EVENT), + timestamp=event_timestamp, + ) + ) + await hass.async_block_till_done() + + # The 
first subdirectory is the device id. Media for events are stored in the + # device subdirectory. First verify that the media was persisted. We will + # then add additional media files, then invoke the garbage collector, and + # then verify orphaned files are removed. + storage_path = pathlib.Path(media_path) + device_path = storage_path / device.id + media_files = list(device_path.glob("*")) + assert len(media_files) == 1 + event_media = media_files[0] + assert event_media.name.endswith(".mp4") + + event_time1 = datetime.datetime.now(datetime.UTC) - datetime.timedelta(days=8) + extra_media1 = ( + device_path / f"{int(event_time1.timestamp())}-camera_motion-test.mp4" + ) + extra_media1.write_bytes(mp4.getvalue()) + event_time2 = event_time1 + datetime.timedelta(hours=20) + extra_media2 = ( + device_path / f"{int(event_time2.timestamp())}-camera_motion-test.jpg" + ) + extra_media2.write_bytes(mp4.getvalue()) + # This event will not be garbage collected because it is too recent + event_time3 = datetime.datetime.now(datetime.UTC) - datetime.timedelta(days=3) + extra_media3 = ( + device_path / f"{int(event_time3.timestamp())}-camera_motion-test.mp4" + ) + extra_media3.write_bytes(mp4.getvalue()) + + assert len(list(device_path.glob("*"))) == 4 + + # Advance the clock to invoke the garbage collector. This will remove extra + # files that are not valid events that are old enough. + point_in_time = datetime.datetime.now(datetime.UTC) + datetime.timedelta(days=1) + with freeze_time(point_in_time): + async_fire_time_changed(hass, point_in_time) + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Verify that the event media is still present and that the extra files + # are removed. Newer media is not removed. + assert event_media.exists() + assert not extra_media1.exists() + assert not extra_media2.exists() + assert extra_media3.exists() diff --git a/tests/components/netatmo/snapshots/test_sensor.ambr b/tests/components/netatmo/snapshots/test_sensor.ambr index b149e80fa5b..00285f565a6 100644 --- a/tests/components/netatmo/snapshots/test_sensor.ambr +++ b/tests/components/netatmo/snapshots/test_sensor.ambr @@ -1501,7 +1501,7 @@ }), 'area_id': None, 'capabilities': dict({ - 'state_class': , + 'state_class': , }), 'config_entry_id': , 'config_subentry_id': , @@ -1520,7 +1520,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Gust angle', 'platform': 'netatmo', @@ -1535,10 +1535,11 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'attribution': 'Data provided by Netatmo', + 'device_class': 'wind_direction', 'friendly_name': 'Home avg Gust angle', 'latitude': 32.17901225, 'longitude': -117.17901225, - 'state_class': , + 'state_class': , 'unit_of_measurement': '°', }), 'context': , @@ -1659,60 +1660,6 @@ 'state': '63.2', }) # --- -# name: test_entity[sensor.home_avg_none-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'config_subentry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.home_avg_none', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'netatmo', - 'previous_unique_id': None, - 
'supported_features': 0, - 'translation_key': None, - 'unique_id': 'Home-avg-windangle_value', - 'unit_of_measurement': '°', - }) -# --- -# name: test_entity[sensor.home_avg_none-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'friendly_name': 'Home avg None', - 'latitude': 32.17901225, - 'longitude': -117.17901225, - 'state_class': , - 'unit_of_measurement': '°', - }), - 'context': , - 'entity_id': 'sensor.home_avg_none', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '17.0', - }) -# --- # name: test_entity[sensor.home_avg_precipitation-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1939,6 +1886,61 @@ 'state': '22.7', }) # --- +# name: test_entity[sensor.home_avg_wind_direction-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_avg_wind_direction', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Wind direction', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'Home-avg-windangle_value', + 'unit_of_measurement': '°', + }) +# --- +# name: test_entity[sensor.home_avg_wind_direction-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 'wind_direction', + 'friendly_name': 'Home avg Wind direction', + 'latitude': 32.17901225, + 'longitude': -117.17901225, + 'state_class': , + 'unit_of_measurement': '°', + }), + 'context': , + 'entity_id': 'sensor.home_avg_wind_direction', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '17.0', + }) +# --- # name: test_entity[sensor.home_avg_wind_speed-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2061,7 +2063,7 @@ }), 'area_id': None, 'capabilities': dict({ - 'state_class': , + 'state_class': , }), 'config_entry_id': , 'config_subentry_id': , @@ -2080,7 +2082,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Gust angle', 'platform': 'netatmo', @@ -2095,10 +2097,11 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'attribution': 'Data provided by Netatmo', + 'device_class': 'wind_direction', 'friendly_name': 'Home max Gust angle', 'latitude': 32.17901225, 'longitude': -117.17901225, - 'state_class': , + 'state_class': , 'unit_of_measurement': '°', }), 'context': , @@ -2219,60 +2222,6 @@ 'state': '76', }) # --- -# name: test_entity[sensor.home_max_none-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'config_subentry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.home_max_none', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, 
- 'translation_key': None, - 'unique_id': 'Home-max-windangle_value', - 'unit_of_measurement': '°', - }) -# --- -# name: test_entity[sensor.home_max_none-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'friendly_name': 'Home max None', - 'latitude': 32.17901225, - 'longitude': -117.17901225, - 'state_class': , - 'unit_of_measurement': '°', - }), - 'context': , - 'entity_id': 'sensor.home_max_none', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '17', - }) -# --- # name: test_entity[sensor.home_max_precipitation-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2499,6 +2448,61 @@ 'state': '27.4', }) # --- +# name: test_entity[sensor.home_max_wind_direction-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_max_wind_direction', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Wind direction', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'Home-max-windangle_value', + 'unit_of_measurement': '°', + }) +# --- +# name: test_entity[sensor.home_max_wind_direction-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 'wind_direction', + 'friendly_name': 'Home max Wind direction', + 'latitude': 32.17901225, + 'longitude': -117.17901225, + 'state_class': , + 'unit_of_measurement': '°', + }), + 'context': , + 'entity_id': 'sensor.home_max_wind_direction', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '17', + }) +# --- # name: test_entity[sensor.home_max_wind_speed-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2621,7 +2625,7 @@ }), 'area_id': None, 'capabilities': dict({ - 'state_class': , + 'state_class': , }), 'config_entry_id': , 'config_subentry_id': , @@ -2640,7 +2644,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Gust angle', 'platform': 'netatmo', @@ -2655,10 +2659,11 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'attribution': 'Data provided by Netatmo', + 'device_class': 'wind_direction', 'friendly_name': 'Home min Gust angle', 'latitude': 32.17901225, 'longitude': -117.17901225, - 'state_class': , + 'state_class': , 'unit_of_measurement': '°', }), 'context': , @@ -2779,60 +2784,6 @@ 'state': '56', }) # --- -# name: test_entity[sensor.home_min_none-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'config_subentry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.home_min_none', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 
'unique_id': 'Home-min-windangle_value', - 'unit_of_measurement': '°', - }) -# --- -# name: test_entity[sensor.home_min_none-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'friendly_name': 'Home min None', - 'latitude': 32.17901225, - 'longitude': -117.17901225, - 'state_class': , - 'unit_of_measurement': '°', - }), - 'context': , - 'entity_id': 'sensor.home_min_none', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '17', - }) -# --- # name: test_entity[sensor.home_min_precipitation-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -3059,6 +3010,61 @@ 'state': '19.8', }) # --- +# name: test_entity[sensor.home_min_wind_direction-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_min_wind_direction', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Wind direction', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'Home-min-windangle_value', + 'unit_of_measurement': '°', + }) +# --- +# name: test_entity[sensor.home_min_wind_direction-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 'wind_direction', + 'friendly_name': 'Home min Wind direction', + 'latitude': 32.17901225, + 'longitude': -117.17901225, + 'state_class': , + 'unit_of_measurement': '°', + }), + 'context': , + 'entity_id': 'sensor.home_min_wind_direction', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '17', + }) +# --- # name: test_entity[sensor.home_min_wind_speed-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -6253,7 +6259,7 @@ }), 'area_id': None, 'capabilities': dict({ - 'state_class': , + 'state_class': , }), 'config_entry_id': , 'config_subentry_id': , @@ -6272,7 +6278,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Gust angle', 'platform': 'netatmo', @@ -6287,8 +6293,9 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'attribution': 'Data provided by Netatmo', + 'device_class': 'wind_direction', 'friendly_name': 'Villa Garden Gust angle', - 'state_class': , + 'state_class': , 'unit_of_measurement': '°', }), 'context': , @@ -6524,7 +6531,7 @@ }), 'area_id': None, 'capabilities': dict({ - 'state_class': , + 'state_class': , }), 'config_entry_id': , 'config_subentry_id': , @@ -6543,7 +6550,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Wind angle', 'platform': 'netatmo', @@ -6558,8 +6565,9 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'attribution': 'Data provided by Netatmo', + 'device_class': 'wind_direction', 'friendly_name': 'Villa Garden Wind angle', - 'state_class': , + 'state_class': , 'unit_of_measurement': '°', }), 'context': , diff --git a/tests/components/netatmo/test_init.py b/tests/components/netatmo/test_init.py index 5fdf4f8ea35..c1a687c6fa8 100644 --- a/tests/components/netatmo/test_init.py +++ 
b/tests/components/netatmo/test_init.py @@ -25,11 +25,7 @@ from .common import ( simulate_webhook, ) -from tests.common import ( - MockConfigEntry, - async_fire_time_changed, - async_get_persistent_notifications, -) +from tests.common import MockConfigEntry, async_fire_time_changed from tests.components.cloud import mock_cloud from tests.typing import WebSocketGenerator @@ -423,9 +419,8 @@ async def test_setup_component_invalid_token_scope(hass: HomeAssistant) -> None: assert config_entry.state is ConfigEntryState.SETUP_ERROR assert hass.config_entries.async_entries(DOMAIN) - notifications = async_get_persistent_notifications(hass) - - assert len(notifications) > 0 + # Test a reauth flow is initiated + assert len(list(config_entry.async_get_active_flows(hass, {"reauth"}))) == 1 for config_entry in hass.config_entries.async_entries("netatmo"): await hass.config_entries.async_remove(config_entry.entry_id) @@ -476,8 +471,9 @@ async def test_setup_component_invalid_token( assert config_entry.state is ConfigEntryState.SETUP_ERROR assert hass.config_entries.async_entries(DOMAIN) - notifications = async_get_persistent_notifications(hass) - assert len(notifications) > 0 + + # Test a reauth flow is initiated + assert len(list(config_entry.async_get_active_flows(hass, {"reauth"}))) == 1 for entry in hass.config_entries.async_entries("netatmo"): await hass.config_entries.async_remove(entry.entry_id) diff --git a/tests/components/nextcloud/snapshots/test_sensor.ambr b/tests/components/nextcloud/snapshots/test_sensor.ambr index 84c1d33f886..e6154841a28 100644 --- a/tests/components/nextcloud/snapshots/test_sensor.ambr +++ b/tests/components/nextcloud/snapshots/test_sensor.ambr @@ -1424,7 +1424,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'Cache ttl', + 'original_name': 'Cache TTL', 'platform': 'nextcloud', 'previous_unique_id': None, 'supported_features': 0, @@ -1436,7 +1436,7 @@ # name: test_async_setup_entry[sensor.my_nc_url_local_cache_ttl-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'my.nc_url.local Cache ttl', + 'friendly_name': 'my.nc_url.local Cache TTL', }), 'context': , 'entity_id': 'sensor.my_nc_url_local_cache_ttl', @@ -1474,7 +1474,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'CPU Load last 15 minutes', + 'original_name': 'CPU load last 15 minutes', 'platform': 'nextcloud', 'previous_unique_id': None, 'supported_features': 0, @@ -1486,7 +1486,7 @@ # name: test_async_setup_entry[sensor.my_nc_url_local_cpu_load_last_15_minutes-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'my.nc_url.local CPU Load last 15 minutes', + 'friendly_name': 'my.nc_url.local CPU load last 15 minutes', 'unit_of_measurement': 'load', }), 'context': , @@ -1525,7 +1525,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'CPU Load last 1 minute', + 'original_name': 'CPU load last 1 minute', 'platform': 'nextcloud', 'previous_unique_id': None, 'supported_features': 0, @@ -1537,7 +1537,7 @@ # name: test_async_setup_entry[sensor.my_nc_url_local_cpu_load_last_1_minute-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'my.nc_url.local CPU Load last 1 minute', + 'friendly_name': 'my.nc_url.local CPU load last 1 minute', 'unit_of_measurement': 'load', }), 'context': , @@ -1576,7 +1576,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'CPU Load last 5 minutes', + 'original_name': 'CPU load last 5 minutes', 'platform': 
'nextcloud', 'previous_unique_id': None, 'supported_features': 0, @@ -1588,7 +1588,7 @@ # name: test_async_setup_entry[sensor.my_nc_url_local_cpu_load_last_5_minutes-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'my.nc_url.local CPU Load last 5 minutes', + 'friendly_name': 'my.nc_url.local CPU load last 5 minutes', 'unit_of_measurement': 'load', }), 'context': , diff --git a/tests/components/nextdns/test_button.py b/tests/components/nextdns/test_button.py index 51970b9bb48..3d2422c34a7 100644 --- a/tests/components/nextdns/test_button.py +++ b/tests/components/nextdns/test_button.py @@ -1,12 +1,19 @@ """Test button of NextDNS integration.""" -from unittest.mock import patch +from unittest.mock import Mock, patch +from aiohttp import ClientError +from aiohttp.client_exceptions import ClientConnectorError +from nextdns import ApiError, InvalidApiKeyError +import pytest from syrupy import SnapshotAssertion -from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.nextdns.const import DOMAIN +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er from homeassistant.util import dt as dt_util @@ -36,7 +43,7 @@ async def test_button_press(hass: HomeAssistant) -> None: ): await hass.services.async_call( BUTTON_DOMAIN, - "press", + SERVICE_PRESS, {ATTR_ENTITY_ID: "button.fake_profile_clear_logs"}, blocking=True, ) @@ -47,3 +54,60 @@ async def test_button_press(hass: HomeAssistant) -> None: state = hass.states.get("button.fake_profile_clear_logs") assert state assert state.state == now.isoformat() + + +@pytest.mark.parametrize( + "exc", + [ + ApiError(Mock()), + TimeoutError, + ClientConnectorError(Mock(), Mock()), + ClientError, + ], +) +async def test_button_failure(hass: HomeAssistant, exc: Exception) -> None: + """Tests that the press action throws HomeAssistantError.""" + await init_integration(hass) + + with ( + patch("homeassistant.components.nextdns.NextDns.clear_logs", side_effect=exc), + pytest.raises( + HomeAssistantError, + match="An error occurred while calling the NextDNS API method for button.fake_profile_clear_logs", + ), + ): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.fake_profile_clear_logs"}, + blocking=True, + ) + + +async def test_button_auth_error(hass: HomeAssistant) -> None: + """Tests that the press action starts re-auth flow.""" + entry = await init_integration(hass) + + with patch( + "homeassistant.components.nextdns.NextDns.clear_logs", + side_effect=InvalidApiKeyError, + ): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.fake_profile_clear_logs"}, + blocking=True, + ) + + assert entry.state is ConfigEntryState.LOADED + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + + flow = flows[0] + assert flow.get("step_id") == "reauth_confirm" + assert flow.get("handler") == DOMAIN + + assert "context" in flow + assert flow["context"].get("source") == SOURCE_REAUTH + assert flow["context"].get("entry_id") == entry.entry_id diff --git a/tests/components/nextdns/test_switch.py b/tests/components/nextdns/test_switch.py index 6e344e34336..c85525ac457 100644 --- 
a/tests/components/nextdns/test_switch.py +++ b/tests/components/nextdns/test_switch.py @@ -5,12 +5,14 @@ from unittest.mock import Mock, patch from aiohttp import ClientError from aiohttp.client_exceptions import ClientConnectorError -from nextdns import ApiError +from nextdns import ApiError, InvalidApiKeyError import pytest from syrupy import SnapshotAssertion from tenacity import RetryError +from homeassistant.components.nextdns.const import DOMAIN from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, @@ -158,3 +160,32 @@ async def test_switch_failure(hass: HomeAssistant, exc: Exception) -> None: {ATTR_ENTITY_ID: "switch.fake_profile_block_page"}, blocking=True, ) + + +async def test_switch_auth_error(hass: HomeAssistant) -> None: + """Tests that the turn on/off action starts re-auth flow.""" + entry = await init_integration(hass) + + with patch( + "homeassistant.components.nextdns.NextDns.set_setting", + side_effect=InvalidApiKeyError, + ): + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: "switch.fake_profile_block_page"}, + blocking=True, + ) + + assert entry.state is ConfigEntryState.LOADED + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + + flow = flows[0] + assert flow.get("step_id") == "reauth_confirm" + assert flow.get("handler") == DOMAIN + + assert "context" in flow + assert flow["context"].get("source") == SOURCE_REAUTH + assert flow["context"].get("entry_id") == entry.entry_id diff --git a/tests/components/nut/test_button.py b/tests/components/nut/test_button.py new file mode 100644 index 00000000000..bbcc521b7f3 --- /dev/null +++ b/tests/components/nut/test_button.py @@ -0,0 +1,102 @@ +"""Test the NUT button platform.""" + +import pytest + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.nut.const import INTEGRATION_SUPPORTED_COMMANDS +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .util import async_init_integration + + +@pytest.mark.parametrize( + "model", + [ + "CP1350C", + "5E650I", + "5E850I", + "CP1500PFCLCD", + "DL650ELCD", + "EATON5P1550", + "blazer_usb", + ], +) +async def test_buttons_ups( + hass: HomeAssistant, entity_registry: er.EntityRegistry, model: str +) -> None: + """Tests that there are no standard buttons.""" + + list_commands_return_value = { + supported_command: supported_command + for supported_command in INTEGRATION_SUPPORTED_COMMANDS + } + + await async_init_integration( + hass, + model, + list_commands_return_value=list_commands_return_value, + ) + + button = hass.states.get("button.ups1_power_cycle_outlet_1") + assert not button + + +@pytest.mark.parametrize( + ("model", "unique_id_base"), + [ + ( + "EATON-EPDU-G3", + "EATON_ePDU MA 00U-C IN: TYPE 00A 0P OUT: 00xTYPE_A000A00000_", + ), + ], +) +async def test_buttons_pdu_dynamic_outlets( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + model: str, + unique_id_base: str, +) -> None: + """Tests that the button entities are correct.""" + + list_commands_return_value = { + supported_command: supported_command + for supported_command in INTEGRATION_SUPPORTED_COMMANDS + } + + for num in range(1, 25): + command = f"outlet.{num!s}.load.cycle" + list_commands_return_value[command] 
= command + + await async_init_integration( + hass, + model, + list_commands_return_value=list_commands_return_value, + ) + + entity_id = "button.ups1_power_cycle_outlet_a1" + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == f"{unique_id_base}outlet.1.load.cycle" + + button = hass.states.get(entity_id) + assert button + assert button.state == STATE_UNKNOWN + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + await hass.async_block_till_done() + + button = hass.states.get(entity_id) + assert button.state != STATE_UNKNOWN + + button = hass.states.get("button.ups1_power_cycle_outlet_25") + assert not button + + button = hass.states.get("button.ups1_power_cycle_outlet_a25") + assert not button diff --git a/tests/components/nut/test_sensor.py b/tests/components/nut/test_sensor.py index 6483d581070..cdec6c5083b 100644 --- a/tests/components/nut/test_sensor.py +++ b/tests/components/nut/test_sensor.py @@ -12,6 +12,7 @@ from homeassistant.const import ( CONF_RESOURCES, PERCENTAGE, STATE_UNKNOWN, + UnitOfElectricCurrent, UnitOfElectricPotential, ) from homeassistant.core import HomeAssistant @@ -103,7 +104,7 @@ async def test_ups_devices_with_unique_ids( [ ( "EATON-EPDU-G3", - "EATON_ePDU MA 00U-C IN: TYPE 00A 0P OUT: 00xTYPE_A000A00000_", + "EATON_ePDU MA 00U-C IN: TYPE 00A 0P OUT: 00xTYPE_A000A00000", ), ], ) @@ -115,11 +116,13 @@ async def test_pdu_devices_with_unique_ids( ) -> None: """Test creation of device sensors with unique ids.""" - await _test_sensor_and_attributes( + await async_init_integration(hass, model) + + _test_sensor_and_attributes( hass, entity_registry, model, - unique_id=f"{unique_id_base}input.voltage", + unique_id=f"{unique_id_base}_input.voltage", device_id="sensor.ups1_input_voltage", state_value="122.91", expected_attributes={ @@ -130,11 +133,11 @@ async def test_pdu_devices_with_unique_ids( }, ) - await _test_sensor_and_attributes( + _test_sensor_and_attributes( hass, entity_registry, model, - unique_id=f"{unique_id_base}ambient.humidity.status", + unique_id=f"{unique_id_base}_ambient.humidity.status", device_id="sensor.ups1_ambient_humidity_status", state_value="good", expected_attributes={ @@ -143,11 +146,11 @@ async def test_pdu_devices_with_unique_ids( }, ) - await _test_sensor_and_attributes( + _test_sensor_and_attributes( hass, entity_registry, model, - unique_id=f"{unique_id_base}ambient.temperature.status", + unique_id=f"{unique_id_base}_ambient.temperature.status", device_id="sensor.ups1_ambient_temperature_status", state_value="good", expected_attributes={ @@ -248,7 +251,7 @@ async def test_stale_options( [ ( "EATON-EPDU-G3-AMBIENT-NOT-PRESENT", - "EATON_ePDU MA 00U-C IN: TYPE 00A 0P OUT: 00xTYPE_A000A00000_", + "EATON_ePDU MA 00U-C IN: TYPE 00A 0P OUT: 00xTYPE_A000A00000", ), ], ) @@ -273,3 +276,57 @@ async def test_pdu_devices_ambient_not_present( entry = entity_registry.async_get("sensor.ups1_ambient_temperature_status") assert not entry + + +@pytest.mark.parametrize( + ("model", "unique_id_base"), + [ + ( + "EATON-EPDU-G3", + "EATON_ePDU MA 00U-C IN: TYPE 00A 0P OUT: 00xTYPE_A000A00000", + ), + ], +) +async def test_pdu_dynamic_outlets( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + model: str, + unique_id_base: str, +) -> None: + """Test for dynamically created outlet sensors.""" + + await async_init_integration(hass, model) + + _test_sensor_and_attributes( + hass, + entity_registry, + model, + 
unique_id=f"{unique_id_base}_outlet.1.current", + device_id="sensor.ups1_outlet_a1_current", + state_value="0", + expected_attributes={ + "device_class": SensorDeviceClass.CURRENT, + "friendly_name": "Ups1 Outlet A1 current", + "unit_of_measurement": UnitOfElectricCurrent.AMPERE, + }, + ) + + _test_sensor_and_attributes( + hass, + entity_registry, + model, + unique_id=f"{unique_id_base}_outlet.24.current", + device_id="sensor.ups1_outlet_a24_current", + state_value="0.19", + expected_attributes={ + "device_class": SensorDeviceClass.CURRENT, + "friendly_name": "Ups1 Outlet A24 current", + "unit_of_measurement": UnitOfElectricCurrent.AMPERE, + }, + ) + + entry = entity_registry.async_get("sensor.ups1_outlet_25_current") + assert not entry + + entry = entity_registry.async_get("sensor.ups1_outlet_a25_current") + assert not entry diff --git a/tests/components/nut/test_switch.py b/tests/components/nut/test_switch.py new file mode 100644 index 00000000000..f2de5eeb5e6 --- /dev/null +++ b/tests/components/nut/test_switch.py @@ -0,0 +1,159 @@ +"""Test the NUT switch platform.""" + +import json +from unittest.mock import AsyncMock + +import pytest + +from homeassistant.components.nut.const import INTEGRATION_SUPPORTED_COMMANDS +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_ON, + STATE_UNKNOWN, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .util import async_init_integration + +from tests.common import load_fixture + + +@pytest.mark.parametrize( + "model", + [ + "CP1350C", + "5E650I", + "5E850I", + "CP1500PFCLCD", + "DL650ELCD", + "EATON5P1550", + "blazer_usb", + ], +) +async def test_switch_ups( + hass: HomeAssistant, entity_registry: er.EntityRegistry, model: str +) -> None: + """Tests that there are no standard switches.""" + + list_commands_return_value = { + supported_command: supported_command + for supported_command in INTEGRATION_SUPPORTED_COMMANDS + } + + await async_init_integration( + hass, + model, + list_commands_return_value=list_commands_return_value, + ) + + switch = hass.states.get("switch.ups1_power_outlet_1") + assert not switch + + +@pytest.mark.parametrize( + ("model", "unique_id_base"), + [ + ( + "EATON-EPDU-G3", + "EATON_ePDU MA 00U-C IN: TYPE 00A 0P OUT: 00xTYPE_A000A00000", + ), + ], +) +async def test_switch_pdu_dynamic_outlets( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + model: str, + unique_id_base: str, +) -> None: + """Tests that the switch entities are correct.""" + + list_commands_return_value = { + supported_command: supported_command + for supported_command in INTEGRATION_SUPPORTED_COMMANDS + } + + for num in range(1, 25): + command = f"outlet.{num!s}.load.on" + list_commands_return_value[command] = command + command = f"outlet.{num!s}.load.off" + list_commands_return_value[command] = command + + ups_fixture = f"nut/{model}.json" + list_vars = json.loads(load_fixture(ups_fixture)) + + run_command = AsyncMock() + + await async_init_integration( + hass, + model, + list_vars=list_vars, + list_commands_return_value=list_commands_return_value, + run_command=run_command, + ) + + entity_id = "switch.ups1_power_outlet_a1" + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == f"{unique_id_base}_outlet.1.load.poweronoff" + + switch = hass.states.get(entity_id) + assert switch + assert switch.state == STATE_ON + + await 
hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + run_command.assert_called_with("ups1", "outlet.1.load.off") + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + run_command.assert_called_with("ups1", "outlet.1.load.on") + + switch = hass.states.get("switch.ups1_power_outlet_25") + assert not switch + + switch = hass.states.get("switch.ups1_power_outlet_a25") + assert not switch + + +async def test_switch_pdu_dynamic_outlets_state_unknown( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, +) -> None: + """Test switch entity with missing status is reported as unknown.""" + + config_entry = await async_init_integration( + hass, + list_ups={"ups1": "UPS 1"}, + list_vars={ + "outlet.count": "1", + "outlet.1.switchable": "yes", + "outlet.1.name": "A1", + }, + list_commands_return_value={ + "outlet.1.load.on": None, + "outlet.1.load.off": None, + }, + ) + + entity_id = "switch.ups1_power_outlet_a1" + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == f"{config_entry.entry_id}_outlet.1.load.poweronoff" + + switch = hass.states.get(entity_id) + assert switch + assert switch.state == STATE_UNKNOWN diff --git a/tests/components/nut/util.py b/tests/components/nut/util.py index bd82ffdd6b4..07c073f0286 100644 --- a/tests/components/nut/util.py +++ b/tests/components/nut/util.py @@ -82,7 +82,7 @@ async def async_init_integration( return entry -async def _test_sensor_and_attributes( +def _test_sensor_and_attributes( hass: HomeAssistant, entity_registry: er.EntityRegistry, model: str, @@ -91,9 +91,8 @@ async def _test_sensor_and_attributes( state_value: str, expected_attributes: dict, ) -> None: - """Test creation of device sensors with unique ids.""" + """Test all of the sensor entry attributes.""" - await async_init_integration(hass, model) entry = entity_registry.async_get(device_id) assert entry assert entry.unique_id == unique_id diff --git a/tests/components/nws/const.py b/tests/components/nws/const.py index 39e954af15a..1de8f67fbdb 100644 --- a/tests/components/nws/const.py +++ b/tests/components/nws/const.py @@ -176,7 +176,7 @@ WEATHER_EXPECTED_OBSERVATION_METRIC = { ATTR_WEATHER_HUMIDITY: 10, } -NONE_OBSERVATION = {key: None for key in DEFAULT_OBSERVATION} +NONE_OBSERVATION = dict.fromkeys(DEFAULT_OBSERVATION) DEFAULT_FORECAST = [ { @@ -235,4 +235,4 @@ EXPECTED_FORECAST_METRIC = { ATTR_FORECAST_HUMIDITY: 75, } -NONE_FORECAST = [{key: None for key in DEFAULT_FORECAST[0]}] +NONE_FORECAST = [dict.fromkeys(DEFAULT_FORECAST[0])] diff --git a/tests/components/ohme/conftest.py b/tests/components/ohme/conftest.py index d05e34d1ed2..e8a7d27b2c3 100644 --- a/tests/components/ohme/conftest.py +++ b/tests/components/ohme/conftest.py @@ -60,6 +60,8 @@ def mock_client(): client.preconditioning = 15 client.serial = "chargerid" client.ct_connected = True + client.cap_available = True + client.cap_enabled = True client.energy = 1000 client.device_info = { "name": "Ohme Home Pro", diff --git a/tests/components/ohme/snapshots/test_diagnostics.ambr b/tests/components/ohme/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..f51c701b71b --- /dev/null +++ b/tests/components/ohme/snapshots/test_diagnostics.ambr @@ -0,0 +1,16 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'cap_available': True, + 'ct_connected': True, + 'device_info': dict({ + 'model': 'Home Pro', + 'name': 'Ohme Home Pro', + 
'sw_version': 'v2.65', + }), + 'vehicles': list([ + 'Nissan Leaf', + 'Tesla Model 3', + ]), + }) +# --- diff --git a/tests/components/ohme/snapshots/test_switch.ambr b/tests/components/ohme/snapshots/test_switch.ambr index 49bf5d5709a..4790d96c551 100644 --- a/tests/components/ohme/snapshots/test_switch.ambr +++ b/tests/components/ohme/snapshots/test_switch.ambr @@ -46,6 +46,53 @@ 'state': 'on', }) # --- +# name: test_switches[switch.ohme_home_pro_price_cap-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.ohme_home_pro_price_cap', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Price cap', + 'platform': 'ohme', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'price_cap', + 'unique_id': 'chargerid_price_cap', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches[switch.ohme_home_pro_price_cap-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Ohme Home Pro Price cap', + }), + 'context': , + 'entity_id': 'switch.ohme_home_pro_price_cap', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- # name: test_switches[switch.ohme_home_pro_require_approval-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/ohme/test_config_flow.py b/tests/components/ohme/test_config_flow.py index bb7ecc00bdc..b8754711d76 100644 --- a/tests/components/ohme/test_config_flow.py +++ b/tests/components/ohme/test_config_flow.py @@ -182,3 +182,84 @@ async def test_reauth_fail( await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reauth_successful" + + +async def test_reconfigure_form(hass: HomeAssistant, mock_client: MagicMock) -> None: + """Test reconfigure form.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_EMAIL: "test@example.com", + CONF_PASSWORD: "hunter1", + }, + ) + entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "reconfigure", "entry_id": entry.entry_id} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + assert not result["errors"] + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_PASSWORD: "hunter2"}, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + +@pytest.mark.parametrize( + ("test_exception", "expected_error"), + [(AuthException, "invalid_auth"), (ApiException, "unknown")], +) +async def test_reconfigure_fail( + hass: HomeAssistant, + mock_client: MagicMock, + test_exception: Exception, + expected_error: str, +) -> None: + """Test reconfigure errors.""" + + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_EMAIL: "test@example.com", + CONF_PASSWORD: "hunter1", + }, + ) + entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "reconfigure", "entry_id": entry.entry_id} + ) + + assert result["step_id"] == "reconfigure" + assert result["type"] is FlowResultType.FORM + assert not result["errors"] + 
+ # Simulate failed login attempt + mock_client.async_login.side_effect = test_exception + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_PASSWORD: "hunter1"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": expected_error} + + # Retry with a successful login + mock_client.async_login.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_PASSWORD: "hunter2"}, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" diff --git a/tests/components/ohme/test_diagnostics.py b/tests/components/ohme/test_diagnostics.py new file mode 100644 index 00000000000..6aab1262189 --- /dev/null +++ b/tests/components/ohme/test_diagnostics.py @@ -0,0 +1,28 @@ +"""Tests for the diagnostics data provided by the Ohme integration.""" + +from unittest.mock import MagicMock + +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_config_entry: MockConfigEntry, + mock_client: MagicMock, + snapshot: SnapshotAssertion, +) -> None: + """Test diagnostics.""" + await setup_integration(hass, mock_config_entry) + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, mock_config_entry) + == snapshot + ) diff --git a/tests/components/ohme/test_services.py b/tests/components/ohme/test_services.py index 76c7ce94b57..2513635c1c2 100644 --- a/tests/components/ohme/test_services.py +++ b/tests/components/ohme/test_services.py @@ -1,6 +1,6 @@ """Tests for services.""" -from unittest.mock import MagicMock +from unittest.mock import AsyncMock, MagicMock import pytest from syrupy.assertion import SnapshotAssertion @@ -8,6 +8,7 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.components.ohme.const import DOMAIN from homeassistant.components.ohme.services import ( ATTR_CONFIG_ENTRY, + ATTR_PRICE_CAP, SERVICE_LIST_CHARGE_SLOTS, ) from homeassistant.core import HomeAssistant @@ -47,6 +48,30 @@ async def test_list_charge_slots( ) +async def test_set_price_cap( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_client: MagicMock, + snapshot: SnapshotAssertion, +) -> None: + """Test set price cap service.""" + + await setup_integration(hass, mock_config_entry) + mock_client.async_change_price_cap = AsyncMock() + + await hass.services.async_call( + DOMAIN, + "set_price_cap", + { + ATTR_CONFIG_ENTRY: mock_config_entry.entry_id, + ATTR_PRICE_CAP: 10.0, + }, + blocking=True, + ) + + mock_client.async_change_price_cap.assert_called_once_with(cap=10.0) + + async def test_list_charge_slots_exception( hass: HomeAssistant, mock_config_entry: MockConfigEntry, diff --git a/tests/components/ohme/test_switch.py b/tests/components/ohme/test_switch.py index b16b70d67f8..8d82a5a3ea4 100644 --- a/tests/components/ohme/test_switch.py +++ b/tests/components/ohme/test_switch.py @@ -1,6 +1,6 @@ """Tests for switches.""" -from unittest.mock import MagicMock, patch +from unittest.mock import AsyncMock, MagicMock, patch from syrupy import SnapshotAssertion @@ -32,7 +32,49 @@ async def test_switches( await snapshot_platform(hass, entity_registry, snapshot, 
mock_config_entry.entry_id) -async def test_switch_on( +async def test_cap_switch_on( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_client: MagicMock, +) -> None: + """Test the switch turn_on action.""" + await setup_integration(hass, mock_config_entry) + mock_client.async_change_price_cap = AsyncMock() + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: "switch.ohme_home_pro_price_cap", + }, + blocking=True, + ) + + mock_client.async_change_price_cap.assert_called_once_with(True) + + +async def test_cap_switch_off( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_client: MagicMock, +) -> None: + """Test the switch turn_off action.""" + await setup_integration(hass, mock_config_entry) + mock_client.async_change_price_cap = AsyncMock() + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + { + ATTR_ENTITY_ID: "switch.ohme_home_pro_price_cap", + }, + blocking=True, + ) + + mock_client.async_change_price_cap.assert_called_once_with(False) + + +async def test_config_switch_on( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_client: MagicMock, @@ -52,7 +94,7 @@ async def test_switch_on( assert len(mock_client.async_set_configuration_value.mock_calls) == 1 -async def test_switch_off( +async def test_config_switch_off( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_client: MagicMock, diff --git a/tests/components/ollama/test_conversation.py b/tests/components/ollama/test_conversation.py index db641ba703b..c718aab1e81 100644 --- a/tests/components/ollama/test_conversation.py +++ b/tests/components/ollama/test_conversation.py @@ -4,6 +4,7 @@ from collections.abc import AsyncGenerator from typing import Any from unittest.mock import AsyncMock, Mock, patch +from freezegun.api import FrozenDateTimeFactory from ollama import Message, ResponseError import pytest from syrupy.assertion import SnapshotAssertion @@ -404,7 +405,10 @@ async def test_unknown_hass_api( async def test_message_history_trimming( - hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_init_component + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_init_component, + freezer: FrozenDateTimeFactory, ) -> None: """Test that a single message history is trimmed according to the config.""" response_idx = 0 diff --git a/tests/components/onboarding/snapshots/test_views.ambr b/tests/components/onboarding/snapshots/test_views.ambr index 2d084bd9ade..48ddf30d1f2 100644 --- a/tests/components/onboarding/snapshots/test_views.ambr +++ b/tests/components/onboarding/snapshots/test_views.ambr @@ -62,7 +62,7 @@ 'with_automatic_settings': None, }), ]), - 'last_non_idle_event': None, + 'last_action_event': None, 'state': 'idle', }) # --- diff --git a/tests/components/onboarding/test_views.py b/tests/components/onboarding/test_views.py index d0a6afa50b5..509dece7dd0 100644 --- a/tests/components/onboarding/test_views.py +++ b/tests/components/onboarding/test_views.py @@ -6,12 +6,16 @@ from http import HTTPStatus from io import StringIO import os from typing import Any -from unittest.mock import ANY, AsyncMock, Mock, patch +from unittest.mock import ANY, DEFAULT, AsyncMock, MagicMock, Mock, patch +from hass_nabucasa.auth import CognitoAuth +from hass_nabucasa.const import STATE_CONNECTED +from hass_nabucasa.iot import CloudIoT import pytest from syrupy import SnapshotAssertion from homeassistant.components import backup, onboarding +from homeassistant.components.cloud import DOMAIN as CLOUD_DOMAIN, 
CloudClient from homeassistant.components.onboarding import const, views from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -1067,3 +1071,139 @@ async def test_onboarding_backup_upload( assert resp.status == 201 assert await resp.json() == {"backup_id": "abc123"} mock_receive.assert_called_once_with(agent_ids=["backup.local"], contents=ANY) + + +@pytest.fixture(name="cloud") +async def cloud_fixture() -> AsyncGenerator[MagicMock]: + """Mock the cloud object. + + See the real hass_nabucasa.Cloud class for how to configure the mock. + """ + with patch( + "homeassistant.components.cloud.Cloud", autospec=True + ) as mock_cloud_class: + mock_cloud = mock_cloud_class.return_value + + mock_cloud.auth = MagicMock(spec=CognitoAuth) + mock_cloud.iot = MagicMock( + spec=CloudIoT, last_disconnect_reason=None, state=STATE_CONNECTED + ) + + def set_up_mock_cloud( + cloud_client: CloudClient, mode: str, **kwargs: Any + ) -> DEFAULT: + """Set up mock cloud with a mock constructor.""" + + # Attributes set in the constructor with parameters. + mock_cloud.client = cloud_client + + return DEFAULT + + mock_cloud_class.side_effect = set_up_mock_cloud + + # Attributes that we mock with default values. + mock_cloud.id_token = None + mock_cloud.is_logged_in = False + + yield mock_cloud + + +@pytest.fixture(name="setup_cloud") +async def setup_cloud_fixture(hass: HomeAssistant, cloud: MagicMock) -> None: + """Fixture that sets up cloud.""" + assert await async_setup_component(hass, "homeassistant", {}) + assert await async_setup_component(hass, CLOUD_DOMAIN, {}) + await hass.async_block_till_done() + + +@pytest.mark.usefixtures("setup_cloud") +async def test_onboarding_cloud_forgot_password( + hass: HomeAssistant, + hass_storage: dict[str, Any], + hass_client: ClientSessionGenerator, + cloud: MagicMock, +) -> None: + """Test cloud forgot password.""" + mock_storage(hass_storage, {"done": []}) + + assert await async_setup_component(hass, "onboarding", {}) + await hass.async_block_till_done() + + client = await hass_client() + + mock_cognito = cloud.auth + + req = await client.post( + "/api/onboarding/cloud/forgot_password", json={"email": "hello@bla.com"} + ) + + assert req.status == HTTPStatus.OK + assert mock_cognito.async_forgot_password.call_count == 1 + + +@pytest.mark.usefixtures("setup_cloud") +async def test_onboarding_cloud_login( + hass: HomeAssistant, + hass_storage: dict[str, Any], + hass_client: ClientSessionGenerator, + cloud: MagicMock, +) -> None: + """Test logging out from cloud.""" + mock_storage(hass_storage, {"done": []}) + + assert await async_setup_component(hass, "onboarding", {}) + await hass.async_block_till_done() + + client = await hass_client() + req = await client.post( + "/api/onboarding/cloud/login", + json={"email": "my_username", "password": "my_password"}, + ) + + assert req.status == HTTPStatus.OK + data = await req.json() + assert data == {"cloud_pipeline": None, "success": True} + assert cloud.login.call_count == 1 + + +@pytest.mark.usefixtures("setup_cloud") +async def test_onboarding_cloud_logout( + hass: HomeAssistant, + hass_storage: dict[str, Any], + hass_client: ClientSessionGenerator, + cloud: MagicMock, +) -> None: + """Test logging out from cloud.""" + mock_storage(hass_storage, {"done": []}) + + assert await async_setup_component(hass, "onboarding", {}) + await hass.async_block_till_done() + + client = await hass_client() + req = await client.post("/api/onboarding/cloud/logout") + + assert req.status == HTTPStatus.OK 
+ data = await req.json() + assert data == {"message": "ok"} + assert cloud.logout.call_count == 1 + + +@pytest.mark.usefixtures("setup_cloud") +async def test_onboarding_cloud_status( + hass: HomeAssistant, + hass_storage: dict[str, Any], + hass_client: ClientSessionGenerator, + cloud: MagicMock, +) -> None: + """Test logging out from cloud.""" + mock_storage(hass_storage, {"done": []}) + + assert await async_setup_component(hass, "onboarding", {}) + await hass.async_block_till_done() + + client = await hass_client() + req = await client.get("/api/onboarding/cloud/status") + + assert req.status == HTTPStatus.OK + data = await req.json() + assert data == {"logged_in": False} diff --git a/tests/components/onvif/__init__.py b/tests/components/onvif/__init__.py index 8a86538b977..868624fb2e4 100644 --- a/tests/components/onvif/__init__.py +++ b/tests/components/onvif/__init__.py @@ -123,7 +123,7 @@ def setup_mock_onvif_camera( mock_onvif_camera.side_effect = mock_constructor -def setup_mock_device(mock_device, capabilities=None): +def setup_mock_device(mock_device, capabilities=None, profiles=None): """Prepare mock ONVIFDevice.""" mock_device.async_setup = AsyncMock(return_value=True) mock_device.port = 80 @@ -145,7 +145,7 @@ def setup_mock_device(mock_device, capabilities=None): ptz=None, video_source_token=None, ) - mock_device.profiles = [profile1] + mock_device.profiles = profiles or [profile1] mock_device.events = MagicMock( webhook_manager=MagicMock(state=WebHookManagerState.STARTED), pullpoint_manager=MagicMock(state=PullPointManagerState.PAUSED), diff --git a/tests/components/onvif/test_init.py b/tests/components/onvif/test_init.py new file mode 100644 index 00000000000..c176bdcc112 --- /dev/null +++ b/tests/components/onvif/test_init.py @@ -0,0 +1,102 @@ +"""Tests for the ONVIF integration __init__ module.""" + +from unittest.mock import MagicMock, patch + +import pytest + +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import MAC, setup_mock_device + +from tests.common import MockConfigEntry + + +@pytest.mark.asyncio +async def test_migrate_camera_entities_unique_ids(hass: HomeAssistant) -> None: + """Test that camera entities unique ids get migrated properly.""" + config_entry = MockConfigEntry(domain="onvif", unique_id=MAC) + config_entry.add_to_hass(hass) + + entity_registry = er.async_get(hass) + + entity_with_only_mac = entity_registry.async_get_or_create( + domain="camera", + platform="onvif", + unique_id=MAC, + config_entry=config_entry, + ) + entity_with_index = entity_registry.async_get_or_create( + domain="camera", + platform="onvif", + unique_id=f"{MAC}_1", + config_entry=config_entry, + ) + # This one should not be migrated (different domain) + entity_sensor = entity_registry.async_get_or_create( + domain="sensor", + platform="onvif", + unique_id=MAC, + config_entry=config_entry, + ) + # This one should not be migrated (already migrated) + entity_migrated = entity_registry.async_get_or_create( + domain="camera", + platform="onvif", + unique_id=f"{MAC}#profile_token_2", + config_entry=config_entry, + ) + # Unparsable index + entity_unparsable_index = entity_registry.async_get_or_create( + domain="camera", + platform="onvif", + unique_id=f"{MAC}_a", + config_entry=config_entry, + ) + # Unexisting index + entity_unexisting_index = entity_registry.async_get_or_create( + domain="camera", + platform="onvif", + unique_id=f"{MAC}_9", + config_entry=config_entry, + ) + + with patch("homeassistant.components.onvif.ONVIFDevice") as mock_device: + setup_mock_device( + mock_device, + capabilities=None, + profiles=[ + MagicMock(token="profile_token_0"), + MagicMock(token="profile_token_1"), + MagicMock(token="profile_token_2"), + ], + ) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entity_with_only_mac = entity_registry.async_get(entity_with_only_mac.entity_id) + entity_with_index = entity_registry.async_get(entity_with_index.entity_id) + entity_sensor = entity_registry.async_get(entity_sensor.entity_id) + entity_migrated = entity_registry.async_get(entity_migrated.entity_id) + + assert entity_with_only_mac is not None + assert entity_with_only_mac.unique_id == f"{MAC}#profile_token_0" + + assert entity_with_index is not None + assert entity_with_index.unique_id == f"{MAC}#profile_token_1" + + # Make sure the sensor entity is unchanged + assert entity_sensor is not None + assert entity_sensor.unique_id == MAC + + # Make sure the already migrated entity is unchanged + assert entity_migrated is not None + assert entity_migrated.unique_id == f"{MAC}#profile_token_2" + + # Make sure the unparsable index entity is unchanged + assert entity_unparsable_index is not None + assert entity_unparsable_index.unique_id == f"{MAC}_a" + + # Make sure the unexisting index entity is unchanged + assert entity_unexisting_index is not None + assert entity_unexisting_index.unique_id == f"{MAC}_9" diff --git a/tests/components/onvif/test_parsers.py b/tests/components/onvif/test_parsers.py index 4f7e10abae6..8448a6e8195 100644 --- a/tests/components/onvif/test_parsers.py +++ b/tests/components/onvif/test_parsers.py @@ -5,6 +5,7 @@ import os import onvif import onvif.settings +import pytest from zeep import Client from zeep.transports import Transport @@ -732,25 +733,24 @@ async def test_tapo_intrusion(hass: HomeAssistant) -> None: async def test_tapo_missing_attributes(hass: HomeAssistant) -> None: """Tests async_parse_tplink_detector with missing fields.""" - event = 
await get_event( - { - "Message": { - "_value_1": { - "Data": { - "ElementItem": [], - "Extension": None, - "SimpleItem": [{"Name": "IsPeople", "Value": "true"}], - "_attr_1": None, - }, - } - }, - "Topic": { - "_value_1": "tns1:RuleEngine/PeopleDetector/People", - }, - } - ) - - assert event is None + with pytest.raises(AttributeError, match="SimpleItem"): + await get_event( + { + "Message": { + "_value_1": { + "Data": { + "ElementItem": [], + "Extension": None, + "SimpleItem": [{"Name": "IsPeople", "Value": "true"}], + "_attr_1": None, + }, + } + }, + "Topic": { + "_value_1": "tns1:RuleEngine/PeopleDetector/People", + }, + } + ) async def test_tapo_unknown_type(hass: HomeAssistant) -> None: @@ -789,3 +789,93 @@ async def test_tapo_unknown_type(hass: HomeAssistant) -> None: ) assert event is None + + +async def test_reolink_package(hass: HomeAssistant) -> None: + """Tests reolink package event.""" + event = await get_event( + { + "SubscriptionReference": None, + "Topic": { + "_value_1": "tns1:RuleEngine/MyRuleDetector/Package", + "Dialect": "http://www.onvif.org/ver10/tev/topicExpression/ConcreteSet", + "_attr_1": {}, + }, + "ProducerReference": None, + "Message": { + "_value_1": { + "Source": { + "SimpleItem": [{"Name": "Source", "Value": "000"}], + "ElementItem": [], + "Extension": None, + "_attr_1": None, + }, + "Key": None, + "Data": { + "SimpleItem": [{"Name": "State", "Value": "true"}], + "ElementItem": [], + "Extension": None, + "_attr_1": None, + }, + "Extension": None, + "UtcTime": datetime.datetime( + 2025, 3, 12, 9, 54, 27, tzinfo=datetime.UTC + ), + "PropertyOperation": "Initialized", + "_attr_1": {}, + } + }, + } + ) + + assert event is not None + assert event.name == "Package Detection" + assert event.platform == "binary_sensor" + assert event.device_class == "occupancy" + assert event.value + assert event.uid == (f"{TEST_UID}_tns1:RuleEngine/MyRuleDetector/Package_000") + + +async def test_hikvision_alarm(hass: HomeAssistant) -> None: + """Tests hikvision camera alarm event.""" + event = await get_event( + { + "SubscriptionReference": None, + "Topic": { + "_value_1": "tns1:Device/Trigger/tnshik:AlarmIn", + "Dialect": "http://www.onvif.org/ver10/tev/topicExpression/ConcreteSet", + "_attr_1": {}, + }, + "ProducerReference": None, + "Message": { + "_value_1": { + "Source": { + "SimpleItem": [{"Name": "AlarmInToken", "Value": "AlarmIn_1"}], + "ElementItem": [], + "Extension": None, + "_attr_1": None, + }, + "Key": None, + "Data": { + "SimpleItem": [{"Name": "State", "Value": "true"}], + "ElementItem": [], + "Extension": None, + "_attr_1": None, + }, + "Extension": None, + "UtcTime": datetime.datetime( + 2025, 3, 13, 22, 57, 26, tzinfo=datetime.UTC + ), + "PropertyOperation": "Initialized", + "_attr_1": {}, + } + }, + } + ) + + assert event is not None + assert event.name == "Motion Alarm" + assert event.platform == "binary_sensor" + assert event.device_class == "motion" + assert event.value + assert event.uid == (f"{TEST_UID}_tns1:Device/Trigger/tnshik:AlarmIn_AlarmIn_1") diff --git a/tests/components/openai_conversation/test_config_flow.py b/tests/components/openai_conversation/test_config_flow.py index 90a08471f39..17a5aad6478 100644 --- a/tests/components/openai_conversation/test_config_flow.py +++ b/tests/components/openai_conversation/test_config_flow.py @@ -1,9 +1,10 @@ """Test the OpenAI Conversation config flow.""" -from unittest.mock import patch +from unittest.mock import AsyncMock, patch -from httpx import Response +import httpx from openai import APIConnectionError, 
AuthenticationError, BadRequestError +from openai.types.responses import Response, ResponseOutputMessage, ResponseOutputText import pytest from homeassistant import config_entries @@ -16,6 +17,13 @@ from homeassistant.components.openai_conversation.const import ( CONF_RECOMMENDED, CONF_TEMPERATURE, CONF_TOP_P, + CONF_WEB_SEARCH, + CONF_WEB_SEARCH_CITY, + CONF_WEB_SEARCH_CONTEXT_SIZE, + CONF_WEB_SEARCH_COUNTRY, + CONF_WEB_SEARCH_REGION, + CONF_WEB_SEARCH_TIMEZONE, + CONF_WEB_SEARCH_USER_LOCATION, DOMAIN, RECOMMENDED_CHAT_MODEL, RECOMMENDED_MAX_TOKENS, @@ -117,13 +125,17 @@ async def test_options_unsupported_model( (APIConnectionError(request=None), "cannot_connect"), ( AuthenticationError( - response=Response(status_code=None, request=""), body=None, message=None + response=httpx.Response(status_code=None, request=""), + body=None, + message=None, ), "invalid_auth", ), ( BadRequestError( - response=Response(status_code=None, request=""), body=None, message=None + response=httpx.Response(status_code=None, request=""), + body=None, + message=None, ), "unknown", ), @@ -172,6 +184,9 @@ async def test_form_invalid_auth(hass: HomeAssistant, side_effect, error) -> Non CONF_TOP_P: RECOMMENDED_TOP_P, CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS, CONF_REASONING_EFFORT: RECOMMENDED_REASONING_EFFORT, + CONF_WEB_SEARCH: False, + CONF_WEB_SEARCH_CONTEXT_SIZE: "medium", + CONF_WEB_SEARCH_USER_LOCATION: False, }, ), ( @@ -183,6 +198,9 @@ async def test_form_invalid_auth(hass: HomeAssistant, side_effect, error) -> Non CONF_TOP_P: RECOMMENDED_TOP_P, CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS, CONF_REASONING_EFFORT: RECOMMENDED_REASONING_EFFORT, + CONF_WEB_SEARCH: False, + CONF_WEB_SEARCH_CONTEXT_SIZE: "medium", + CONF_WEB_SEARCH_USER_LOCATION: False, }, { CONF_RECOMMENDED: True, @@ -225,3 +243,105 @@ async def test_options_switching( await hass.async_block_till_done() assert options["type"] is FlowResultType.CREATE_ENTRY assert options["data"] == expected_options + + +async def test_options_web_search_user_location( + hass: HomeAssistant, mock_config_entry, mock_init_component +) -> None: + """Test fetching user location.""" + options_flow = await hass.config_entries.options.async_init( + mock_config_entry.entry_id + ) + hass.config.country = "US" + hass.config.time_zone = "America/Los_Angeles" + hass.states.async_set( + "zone.home", "0", {"latitude": 37.7749, "longitude": -122.4194} + ) + with patch( + "openai.resources.responses.AsyncResponses.create", + new_callable=AsyncMock, + ) as mock_create: + mock_create.return_value = Response( + object="response", + id="resp_A", + created_at=1700000000, + model="gpt-4o-mini", + parallel_tool_calls=True, + tool_choice="auto", + tools=[], + output=[ + ResponseOutputMessage( + type="message", + id="msg_A", + content=[ + ResponseOutputText( + type="output_text", + text='{"city": "San Francisco", "region": "California"}', + annotations=[], + ) + ], + role="assistant", + status="completed", + ) + ], + ) + + options = await hass.config_entries.options.async_configure( + options_flow["flow_id"], + { + CONF_RECOMMENDED: False, + CONF_PROMPT: "Speak like a pirate", + CONF_TEMPERATURE: 1.0, + CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL, + CONF_TOP_P: RECOMMENDED_TOP_P, + CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS, + CONF_REASONING_EFFORT: RECOMMENDED_REASONING_EFFORT, + CONF_WEB_SEARCH: True, + CONF_WEB_SEARCH_CONTEXT_SIZE: "medium", + CONF_WEB_SEARCH_USER_LOCATION: True, + }, + ) + await hass.async_block_till_done() + assert ( + mock_create.call_args.kwargs["input"][0]["content"] == 
"Where are the following" + " coordinates located: (37.7749, -122.4194)?" + ) + assert options["type"] is FlowResultType.CREATE_ENTRY + assert options["data"] == { + CONF_RECOMMENDED: False, + CONF_PROMPT: "Speak like a pirate", + CONF_TEMPERATURE: 1.0, + CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL, + CONF_TOP_P: RECOMMENDED_TOP_P, + CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS, + CONF_REASONING_EFFORT: RECOMMENDED_REASONING_EFFORT, + CONF_WEB_SEARCH: True, + CONF_WEB_SEARCH_CONTEXT_SIZE: "medium", + CONF_WEB_SEARCH_USER_LOCATION: True, + CONF_WEB_SEARCH_CITY: "San Francisco", + CONF_WEB_SEARCH_REGION: "California", + CONF_WEB_SEARCH_COUNTRY: "US", + CONF_WEB_SEARCH_TIMEZONE: "America/Los_Angeles", + } + + +async def test_options_web_search_unsupported_model( + hass: HomeAssistant, mock_config_entry, mock_init_component +) -> None: + """Test the options form giving error about web search not being available.""" + options_flow = await hass.config_entries.options.async_init( + mock_config_entry.entry_id + ) + result = await hass.config_entries.options.async_configure( + options_flow["flow_id"], + { + CONF_RECOMMENDED: False, + CONF_PROMPT: "Speak like a pirate", + CONF_CHAT_MODEL: "o1-pro", + CONF_LLM_HASS_API: "assist", + CONF_WEB_SEARCH: True, + }, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"web_search": "web_search_not_supported"} diff --git a/tests/components/openai_conversation/test_conversation.py b/tests/components/openai_conversation/test_conversation.py index 238fd5f2d7b..d6f09e0f30e 100644 --- a/tests/components/openai_conversation/test_conversation.py +++ b/tests/components/openai_conversation/test_conversation.py @@ -3,20 +3,52 @@ from collections.abc import Generator from unittest.mock import AsyncMock, patch -from httpx import Response +import httpx from openai import AuthenticationError, RateLimitError -from openai.types.chat.chat_completion_chunk import ( - ChatCompletionChunk, - Choice, - ChoiceDelta, - ChoiceDeltaToolCall, - ChoiceDeltaToolCallFunction, +from openai.types import ResponseFormatText +from openai.types.responses import ( + Response, + ResponseCompletedEvent, + ResponseContentPartAddedEvent, + ResponseContentPartDoneEvent, + ResponseCreatedEvent, + ResponseError, + ResponseErrorEvent, + ResponseFailedEvent, + ResponseFunctionCallArgumentsDeltaEvent, + ResponseFunctionCallArgumentsDoneEvent, + ResponseFunctionToolCall, + ResponseFunctionWebSearch, + ResponseIncompleteEvent, + ResponseInProgressEvent, + ResponseOutputItemAddedEvent, + ResponseOutputItemDoneEvent, + ResponseOutputMessage, + ResponseOutputText, + ResponseReasoningItem, + ResponseStreamEvent, + ResponseTextConfig, + ResponseTextDeltaEvent, + ResponseTextDoneEvent, + ResponseWebSearchCallCompletedEvent, + ResponseWebSearchCallInProgressEvent, + ResponseWebSearchCallSearchingEvent, ) +from openai.types.responses.response import IncompleteDetails import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components import conversation from homeassistant.components.homeassistant.exposed_entities import async_expose_entity +from homeassistant.components.openai_conversation.const import ( + CONF_WEB_SEARCH, + CONF_WEB_SEARCH_CITY, + CONF_WEB_SEARCH_CONTEXT_SIZE, + CONF_WEB_SEARCH_COUNTRY, + CONF_WEB_SEARCH_REGION, + CONF_WEB_SEARCH_TIMEZONE, + CONF_WEB_SEARCH_USER_LOCATION, +) from homeassistant.const import CONF_LLM_HASS_API from homeassistant.core import Context, HomeAssistant from homeassistant.helpers import intent 
@@ -28,40 +60,88 @@ from tests.components.conversation import ( mock_chat_log, # noqa: F401 ) -ASSIST_RESPONSE_FINISH = ( - # Assistant message - ChatCompletionChunk( - id="chatcmpl-B", - created=1700000000, - model="gpt-4-1106-preview", - object="chat.completion.chunk", - choices=[Choice(index=0, delta=ChoiceDelta(content="Cool"))], - ), - # Finish stream - ChatCompletionChunk( - id="chatcmpl-B", - created=1700000000, - model="gpt-4-1106-preview", - object="chat.completion.chunk", - choices=[Choice(index=0, finish_reason="stop", delta=ChoiceDelta())], - ), -) - @pytest.fixture def mock_create_stream() -> Generator[AsyncMock]: """Mock stream response.""" - async def mock_generator(stream): - for value in stream: + async def mock_generator(events, **kwargs): + response = Response( + id="resp_A", + created_at=1700000000, + error=None, + incomplete_details=None, + instructions=kwargs.get("instructions"), + metadata=kwargs.get("metadata", {}), + model=kwargs.get("model", "gpt-4o-mini"), + object="response", + output=[], + parallel_tool_calls=kwargs.get("parallel_tool_calls", True), + temperature=kwargs.get("temperature", 1.0), + tool_choice=kwargs.get("tool_choice", "auto"), + tools=kwargs.get("tools"), + top_p=kwargs.get("top_p", 1.0), + max_output_tokens=kwargs.get("max_output_tokens", 100000), + previous_response_id=kwargs.get("previous_response_id"), + reasoning=kwargs.get("reasoning"), + status="in_progress", + text=kwargs.get( + "text", ResponseTextConfig(format=ResponseFormatText(type="text")) + ), + truncation=kwargs.get("truncation", "disabled"), + usage=None, + user=kwargs.get("user"), + store=kwargs.get("store", True), + ) + yield ResponseCreatedEvent( + response=response, + type="response.created", + ) + yield ResponseInProgressEvent( + response=response, + type="response.in_progress", + ) + response.status = "completed" + + for value in events: + if isinstance(value, ResponseOutputItemDoneEvent): + response.output.append(value.item) + elif isinstance(value, IncompleteDetails): + response.status = "incomplete" + response.incomplete_details = value + break + if isinstance(value, ResponseError): + response.status = "failed" + response.error = value + break + yield value + if isinstance(value, ResponseErrorEvent): + return + + if response.status == "incomplete": + yield ResponseIncompleteEvent( + response=response, + type="response.incomplete", + ) + elif response.status == "failed": + yield ResponseFailedEvent( + response=response, + type="response.failed", + ) + else: + yield ResponseCompletedEvent( + response=response, + type="response.completed", + ) + with patch( - "openai.resources.chat.completions.AsyncCompletions.create", + "openai.resources.responses.AsyncResponses.create", AsyncMock(), ) as mock_create: mock_create.side_effect = lambda **kwargs: mock_generator( - mock_create.return_value.pop(0) + mock_create.return_value.pop(0), **kwargs ) yield mock_create @@ -99,13 +179,17 @@ async def test_entity( [ ( RateLimitError( - response=Response(status_code=429, request=""), body=None, message=None + response=httpx.Response(status_code=429, request=""), + body=None, + message=None, ), "Rate limited or insufficient funds", ), ( AuthenticationError( - response=Response(status_code=401, request=""), body=None, message=None + response=httpx.Response(status_code=401, request=""), + body=None, + message=None, ), "Error talking to OpenAI", ), @@ -120,7 +204,7 @@ async def test_error_handling( ) -> None: """Test that we handle errors when calling completion API.""" with patch( - 
"openai.resources.chat.completions.AsyncCompletions.create", + "openai.resources.responses.AsyncResponses.create", new_callable=AsyncMock, side_effect=exception, ): @@ -132,6 +216,121 @@ async def test_error_handling( assert result.response.speech["plain"]["speech"] == message, result.response.speech +@pytest.mark.parametrize( + ("reason", "message"), + [ + ( + "max_output_tokens", + "max output tokens reached", + ), + ( + "content_filter", + "content filter triggered", + ), + ( + None, + "unknown reason", + ), + ], +) +async def test_incomplete_response( + hass: HomeAssistant, + mock_config_entry_with_assist: MockConfigEntry, + mock_init_component, + mock_create_stream: AsyncMock, + reason: str, + message: str, +) -> None: + """Test handling early model stop.""" + # Incomplete details received after some content is generated + mock_create_stream.return_value = [ + ( + # Start message + *create_message_item( + id="msg_A", + text=["Once upon", " a time, ", "there was "], + output_index=0, + ), + # Length limit or content filter + IncompleteDetails(reason=reason), + ) + ] + + result = await conversation.async_converse( + hass, + "Please tell me a big story", + "mock-conversation-id", + Context(), + agent_id="conversation.openai", + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR, result + assert ( + result.response.speech["plain"]["speech"] + == f"OpenAI response incomplete: {message}" + ), result.response.speech + + # Incomplete details received before any content is generated + mock_create_stream.return_value = [ + ( + # Start generating response + *create_reasoning_item(id="rs_A", output_index=0), + # Length limit or content filter + IncompleteDetails(reason=reason), + ) + ] + + result = await conversation.async_converse( + hass, + "please tell me a big story", + "mock-conversation-id", + Context(), + agent_id="conversation.openai", + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR, result + assert ( + result.response.speech["plain"]["speech"] + == f"OpenAI response incomplete: {message}" + ), result.response.speech + + +@pytest.mark.parametrize( + ("error", "message"), + [ + ( + ResponseError(code="rate_limit_exceeded", message="Rate limit exceeded"), + "OpenAI response failed: Rate limit exceeded", + ), + ( + ResponseErrorEvent(type="error", message="Some error"), + "OpenAI response error: Some error", + ), + ], +) +async def test_failed_response( + hass: HomeAssistant, + mock_config_entry_with_assist: MockConfigEntry, + mock_init_component, + mock_create_stream: AsyncMock, + error: ResponseError | ResponseErrorEvent, + message: str, +) -> None: + """Test handling failed and error responses.""" + mock_create_stream.return_value = [(error,)] + + result = await conversation.async_converse( + hass, + "next natural number please", + "mock-conversation-id", + Context(), + agent_id="conversation.openai", + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR, result + assert result.response.speech["plain"]["speech"] == message, result.response.speech + + async def test_conversation_agent( hass: HomeAssistant, mock_config_entry: MockConfigEntry, @@ -144,6 +343,200 @@ async def test_conversation_agent( assert agent.supported_languages == "*" +def create_message_item( + id: str, text: str | list[str], output_index: int +) -> list[ResponseStreamEvent]: + """Create a message item.""" + if isinstance(text, str): + text = [text] + + content = ResponseOutputText(annotations=[], text="", type="output_text") + events = 
[ + ResponseOutputItemAddedEvent( + item=ResponseOutputMessage( + id=id, + content=[], + type="message", + role="assistant", + status="in_progress", + ), + output_index=output_index, + type="response.output_item.added", + ), + ResponseContentPartAddedEvent( + content_index=0, + item_id=id, + output_index=output_index, + part=content, + type="response.content_part.added", + ), + ] + + content.text = "".join(text) + events.extend( + ResponseTextDeltaEvent( + content_index=0, + delta=delta, + item_id=id, + output_index=output_index, + type="response.output_text.delta", + ) + for delta in text + ) + + events.extend( + [ + ResponseTextDoneEvent( + content_index=0, + item_id=id, + output_index=output_index, + text="".join(text), + type="response.output_text.done", + ), + ResponseContentPartDoneEvent( + content_index=0, + item_id=id, + output_index=output_index, + part=content, + type="response.content_part.done", + ), + ResponseOutputItemDoneEvent( + item=ResponseOutputMessage( + id=id, + content=[content], + role="assistant", + status="completed", + type="message", + ), + output_index=output_index, + type="response.output_item.done", + ), + ] + ) + + return events + + +def create_function_tool_call_item( + id: str, arguments: str | list[str], call_id: str, name: str, output_index: int +) -> list[ResponseStreamEvent]: + """Create a function tool call item.""" + if isinstance(arguments, str): + arguments = [arguments] + + events = [ + ResponseOutputItemAddedEvent( + item=ResponseFunctionToolCall( + id=id, + arguments="", + call_id=call_id, + name=name, + type="function_call", + status="in_progress", + ), + output_index=output_index, + type="response.output_item.added", + ) + ] + + events.extend( + ResponseFunctionCallArgumentsDeltaEvent( + delta=delta, + item_id=id, + output_index=output_index, + type="response.function_call_arguments.delta", + ) + for delta in arguments + ) + + events.append( + ResponseFunctionCallArgumentsDoneEvent( + arguments="".join(arguments), + item_id=id, + output_index=output_index, + type="response.function_call_arguments.done", + ) + ) + + events.append( + ResponseOutputItemDoneEvent( + item=ResponseFunctionToolCall( + id=id, + arguments="".join(arguments), + call_id=call_id, + name=name, + type="function_call", + status="completed", + ), + output_index=output_index, + type="response.output_item.done", + ) + ) + + return events + + +def create_reasoning_item(id: str, output_index: int) -> list[ResponseStreamEvent]: + """Create a reasoning item.""" + return [ + ResponseOutputItemAddedEvent( + item=ResponseReasoningItem( + id=id, + summary=[], + type="reasoning", + status=None, + ), + output_index=output_index, + type="response.output_item.added", + ), + ResponseOutputItemDoneEvent( + item=ResponseReasoningItem( + id=id, + summary=[], + type="reasoning", + status=None, + ), + output_index=output_index, + type="response.output_item.done", + ), + ] + + +def create_web_search_item(id: str, output_index: int) -> list[ResponseStreamEvent]: + """Create a web search call item.""" + return [ + ResponseOutputItemAddedEvent( + item=ResponseFunctionWebSearch( + id=id, status="in_progress", type="web_search_call" + ), + output_index=output_index, + type="response.output_item.added", + ), + ResponseWebSearchCallInProgressEvent( + item_id=id, + output_index=output_index, + type="response.web_search_call.in_progress", + ), + ResponseWebSearchCallSearchingEvent( + item_id=id, + output_index=output_index, + type="response.web_search_call.searching", + ), + 
ResponseWebSearchCallCompletedEvent( + item_id=id, + output_index=output_index, + type="response.web_search_call.completed", + ), + ResponseOutputItemDoneEvent( + item=ResponseFunctionWebSearch( + id=id, status="completed", type="web_search_call" + ), + output_index=output_index, + type="response.output_item.done", + ), + ] + + async def test_function_call( hass: HomeAssistant, mock_config_entry_with_assist: MockConfigEntry, @@ -156,111 +549,27 @@ async def test_function_call( mock_create_stream.return_value = [ # Initial conversation ( + # Wait for the model to think + *create_reasoning_item(id="rs_A", output_index=0), # First tool call - ChatCompletionChunk( - id="chatcmpl-A", - created=1700000000, - model="gpt-4-1106-preview", - object="chat.completion.chunk", - choices=[ - Choice( - index=0, - delta=ChoiceDelta( - tool_calls=[ - ChoiceDeltaToolCall( - id="call_call_1", - index=0, - function=ChoiceDeltaToolCallFunction( - name="test_tool", - arguments=None, - ), - ) - ] - ), - ) - ], - ), - ChatCompletionChunk( - id="chatcmpl-A", - created=1700000000, - model="gpt-4-1106-preview", - object="chat.completion.chunk", - choices=[ - Choice( - index=0, - delta=ChoiceDelta( - tool_calls=[ - ChoiceDeltaToolCall( - index=0, - function=ChoiceDeltaToolCallFunction( - name=None, - arguments='{"para', - ), - ) - ] - ), - ) - ], - ), - ChatCompletionChunk( - id="chatcmpl-A", - created=1700000000, - model="gpt-4-1106-preview", - object="chat.completion.chunk", - choices=[ - Choice( - index=0, - delta=ChoiceDelta( - tool_calls=[ - ChoiceDeltaToolCall( - index=0, - function=ChoiceDeltaToolCallFunction( - name=None, - arguments='m1":"call1"}', - ), - ) - ] - ), - ) - ], + *create_function_tool_call_item( + id="fc_1", + arguments=['{"para', 'm1":"call1"}'], + call_id="call_call_1", + name="test_tool", + output_index=1, ), # Second tool call - ChatCompletionChunk( - id="chatcmpl-A", - created=1700000000, - model="gpt-4-1106-preview", - object="chat.completion.chunk", - choices=[ - Choice( - index=0, - delta=ChoiceDelta( - tool_calls=[ - ChoiceDeltaToolCall( - id="call_call_2", - index=1, - function=ChoiceDeltaToolCallFunction( - name="test_tool", - arguments='{"param1":"call2"}', - ), - ) - ] - ), - ) - ], - ), - # Finish stream - ChatCompletionChunk( - id="chatcmpl-A", - created=1700000000, - model="gpt-4-1106-preview", - object="chat.completion.chunk", - choices=[ - Choice(index=0, finish_reason="tool_calls", delta=ChoiceDelta()) - ], + *create_function_tool_call_item( + id="fc_2", + arguments='{"param1":"call2"}', + call_id="call_call_2", + name="test_tool", + output_index=2, ), ), # Response after tool responses - ASSIST_RESPONSE_FINISH, + create_message_item(id="msg_A", text="Cool", output_index=0), ] mock_chat_log.mock_tool_results( { @@ -288,99 +597,27 @@ async def test_function_call( ( "Test function call started with missing arguments", ( - ChatCompletionChunk( - id="chatcmpl-A", - created=1700000000, - model="gpt-4-1106-preview", - object="chat.completion.chunk", - choices=[ - Choice( - index=0, - delta=ChoiceDelta( - tool_calls=[ - ChoiceDeltaToolCall( - id="call_call_1", - index=0, - function=ChoiceDeltaToolCallFunction( - name="test_tool", - arguments=None, - ), - ) - ] - ), - ) - ], - ), - ChatCompletionChunk( - id="chatcmpl-B", - created=1700000000, - model="gpt-4-1106-preview", - object="chat.completion.chunk", - choices=[Choice(index=0, delta=ChoiceDelta(content="Cool"))], + *create_function_tool_call_item( + id="fc_1", + arguments=[], + call_id="call_call_1", + name="test_tool", + 
output_index=0, ), + *create_message_item(id="msg_A", text="Cool", output_index=1), ), ), ( "Test invalid JSON", ( - ChatCompletionChunk( - id="chatcmpl-A", - created=1700000000, - model="gpt-4-1106-preview", - object="chat.completion.chunk", - choices=[ - Choice( - index=0, - delta=ChoiceDelta( - tool_calls=[ - ChoiceDeltaToolCall( - id="call_call_1", - index=0, - function=ChoiceDeltaToolCallFunction( - name="test_tool", - arguments=None, - ), - ) - ] - ), - ) - ], - ), - ChatCompletionChunk( - id="chatcmpl-A", - created=1700000000, - model="gpt-4-1106-preview", - object="chat.completion.chunk", - choices=[ - Choice( - index=0, - delta=ChoiceDelta( - tool_calls=[ - ChoiceDeltaToolCall( - index=0, - function=ChoiceDeltaToolCallFunction( - name=None, - arguments='{"para', - ), - ) - ] - ), - ) - ], - ), - ChatCompletionChunk( - id="chatcmpl-B", - created=1700000000, - model="gpt-4-1106-preview", - object="chat.completion.chunk", - choices=[ - Choice( - index=0, - delta=ChoiceDelta(content="Cool"), - finish_reason="tool_calls", - ) - ], + *create_function_tool_call_item( + id="fc_1", + arguments=['{"para'], + call_id="call_call_1", + name="test_tool", + output_index=0, ), + *create_message_item(id="msg_A", text="Cool", output_index=1), ), ), ], @@ -390,9 +627,8 @@ async def test_function_call_invalid( mock_config_entry_with_assist: MockConfigEntry, mock_init_component, mock_create_stream: AsyncMock, - mock_chat_log: MockChatLog, # noqa: F811 description: str, - messages: tuple[ChatCompletionChunk], + messages: tuple[ResponseStreamEvent], ) -> None: """Test function call containing invalid data.""" mock_create_stream.return_value = [messages] @@ -432,7 +668,9 @@ async def test_assist_api_tools_conversion( hass.states.async_set(f"{component}.test", "on") async_expose_entity(hass, "conversation", f"{component}.test", True) - mock_create_stream.return_value = [ASSIST_RESPONSE_FINISH] + mock_create_stream.return_value = [ + create_message_item(id="msg_A", text="Cool", output_index=0) + ] await conversation.async_converse( hass, "hello", None, Context(), agent_id="conversation.openai" @@ -440,3 +678,60 @@ async def test_assist_api_tools_conversion( tools = mock_create_stream.mock_calls[0][2]["tools"] assert tools + + +async def test_web_search( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_init_component, + mock_create_stream, + mock_chat_log: MockChatLog, # noqa: F811 +) -> None: + """Test web_search_tool.""" + hass.config_entries.async_update_entry( + mock_config_entry, + options={ + **mock_config_entry.options, + CONF_WEB_SEARCH: True, + CONF_WEB_SEARCH_CONTEXT_SIZE: "low", + CONF_WEB_SEARCH_USER_LOCATION: True, + CONF_WEB_SEARCH_CITY: "San Francisco", + CONF_WEB_SEARCH_COUNTRY: "US", + CONF_WEB_SEARCH_REGION: "California", + CONF_WEB_SEARCH_TIMEZONE: "America/Los_Angeles", + }, + ) + await hass.config_entries.async_reload(mock_config_entry.entry_id) + + message = "Home Assistant now supports ChatGPT Search in Assist" + mock_create_stream.return_value = [ + # Initial conversation + ( + *create_web_search_item(id="ws_A", output_index=0), + *create_message_item(id="msg_A", text=message, output_index=1), + ) + ] + + result = await conversation.async_converse( + hass, + "What's on the latest news?", + mock_chat_log.conversation_id, + Context(), + agent_id="conversation.openai", + ) + + assert mock_create_stream.mock_calls[0][2]["tools"] == [ + { + "type": "web_search_preview", + "search_context_size": "low", + "user_location": { + "type": "approximate", + "city": "San 
Francisco", + "region": "California", + "country": "US", + "timezone": "America/Los_Angeles", + }, + } + ] + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert result.response.speech["plain"]["speech"] == message, result.response.speech diff --git a/tests/components/openai_conversation/test_init.py b/tests/components/openai_conversation/test_init.py index d78ce398c92..5aef68841ee 100644 --- a/tests/components/openai_conversation/test_init.py +++ b/tests/components/openai_conversation/test_init.py @@ -1,8 +1,8 @@ """Tests for the OpenAI integration.""" -from unittest.mock import patch +from unittest.mock import AsyncMock, mock_open, patch -from httpx import Response +import httpx from openai import ( APIConnectionError, AuthenticationError, @@ -11,8 +11,10 @@ from openai import ( ) from openai.types.image import Image from openai.types.images_response import ImagesResponse +from openai.types.responses import Response, ResponseOutputMessage, ResponseOutputText import pytest +from homeassistant.components.openai_conversation import CONF_FILENAMES from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.setup import async_setup_component @@ -114,7 +116,9 @@ async def test_generate_image_service_error( patch( "openai.resources.images.AsyncImages.generate", side_effect=RateLimitError( - response=Response(status_code=None, request=""), + response=httpx.Response( + status_code=500, request=httpx.Request(method="GET", url="") + ), body=None, message="Reason", ), @@ -133,22 +137,60 @@ async def test_generate_image_service_error( ) +@pytest.mark.usefixtures("mock_init_component") +async def test_generate_content_service_with_image_not_allowed_path( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Test generate content service with an image in a not allowed path.""" + with ( + patch("pathlib.Path.exists", return_value=True), + patch.object(hass.config, "is_allowed_path", return_value=False), + pytest.raises( + HomeAssistantError, + match=( + "Cannot read `doorbell_snapshot.jpg`, no access to path; " + "`allowlist_external_dirs` may need to be adjusted in " + "`configuration.yaml`" + ), + ), + ): + await hass.services.async_call( + "openai_conversation", + "generate_content", + { + "config_entry": mock_config_entry.entry_id, + "prompt": "Describe this image from my doorbell camera", + "filenames": "doorbell_snapshot.jpg", + }, + blocking=True, + return_response=True, + ) + + +@pytest.mark.parametrize( + ("service_name", "error"), + [ + ("generate_image", "Invalid config entry provided. Got invalid_entry"), + ("generate_content", "Invalid config entry provided. Got invalid_entry"), + ], +) async def test_invalid_config_entry( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_init_component, + service_name: str, + error: str, ) -> None: """Assert exception when invalid config entry is provided.""" service_data = { "prompt": "Picture of a dog", "config_entry": "invalid_entry", } - with pytest.raises( - ServiceValidationError, match="Invalid config entry provided. 
Got invalid_entry" - ): + with pytest.raises(ServiceValidationError, match=error): await hass.services.async_call( "openai_conversation", - "generate_image", + service_name, service_data, blocking=True, return_response=True, @@ -158,18 +200,29 @@ async def test_invalid_config_entry( @pytest.mark.parametrize( ("side_effect", "error"), [ - (APIConnectionError(request=None), "Connection error"), + ( + APIConnectionError(request=httpx.Request(method="GET", url="test")), + "Connection error", + ), ( AuthenticationError( - response=Response(status_code=None, request=""), body=None, message=None + response=httpx.Response( + status_code=500, request=httpx.Request(method="GET", url="test") + ), + body=None, + message="", ), "Invalid API key", ), ( BadRequestError( - response=Response(status_code=None, request=""), body=None, message=None + response=httpx.Response( + status_code=500, request=httpx.Request(method="GET", url="test") + ), + body=None, + message="", ), - "openai_conversation integration not ready yet: None", + "openai_conversation integration not ready yet", ), ], ) @@ -188,3 +241,253 @@ async def test_init_error( assert await async_setup_component(hass, "openai_conversation", {}) await hass.async_block_till_done() assert error in caplog.text + + +@pytest.mark.parametrize( + ("service_data", "expected_args", "number_of_files"), + [ + ( + {"prompt": "Picture of a dog", "filenames": []}, + { + "input": [ + { + "content": [ + { + "type": "input_text", + "text": "Picture of a dog", + }, + ], + }, + ], + }, + 0, + ), + ( + {"prompt": "Picture of a dog", "filenames": ["/a/b/c.jpg"]}, + { + "input": [ + { + "content": [ + { + "type": "input_text", + "text": "Picture of a dog", + }, + { + "type": "input_image", + "image_url": "data:image/jpeg;base64,BASE64IMAGE1", + "detail": "auto", + "file_id": "/a/b/c.jpg", + }, + ], + }, + ], + }, + 1, + ), + ( + { + "prompt": "Picture of a dog", + "filenames": ["/a/b/c.jpg", "d/e/f.jpg"], + }, + { + "input": [ + { + "content": [ + { + "type": "input_text", + "text": "Picture of a dog", + }, + { + "type": "input_image", + "image_url": "data:image/jpeg;base64,BASE64IMAGE1", + "detail": "auto", + "file_id": "/a/b/c.jpg", + }, + { + "type": "input_image", + "image_url": "data:image/jpeg;base64,BASE64IMAGE2", + "detail": "auto", + "file_id": "d/e/f.jpg", + }, + ], + }, + ], + }, + 2, + ), + ], +) +async def test_generate_content_service( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_init_component, + service_data, + expected_args, + number_of_files, +) -> None: + """Test generate content service.""" + service_data["config_entry"] = mock_config_entry.entry_id + expected_args["model"] = "gpt-4o-mini" + expected_args["max_output_tokens"] = 150 + expected_args["top_p"] = 1.0 + expected_args["temperature"] = 1.0 + expected_args["user"] = None + expected_args["store"] = False + expected_args["input"][0]["type"] = "message" + expected_args["input"][0]["role"] = "user" + + with ( + patch( + "openai.resources.responses.AsyncResponses.create", + new_callable=AsyncMock, + ) as mock_create, + patch( + "base64.b64encode", side_effect=[b"BASE64IMAGE1", b"BASE64IMAGE2"] + ) as mock_b64encode, + patch("builtins.open", mock_open(read_data="ABC")) as mock_file, + patch("pathlib.Path.exists", return_value=True), + patch.object(hass.config, "is_allowed_path", return_value=True), + ): + mock_create.return_value = Response( + object="response", + id="resp_A", + created_at=1700000000, + model="gpt-4o-mini", + parallel_tool_calls=True, + tool_choice="auto", + 
tools=[], + output=[ + ResponseOutputMessage( + type="message", + id="msg_A", + content=[ + ResponseOutputText( + type="output_text", + text="This is the response", + annotations=[], + ) + ], + role="assistant", + status="completed", + ) + ], + ) + + response = await hass.services.async_call( + "openai_conversation", + "generate_content", + service_data, + blocking=True, + return_response=True, + ) + assert response == {"text": "This is the response"} + assert len(mock_create.mock_calls) == 1 + assert mock_create.mock_calls[0][2] == expected_args + assert mock_b64encode.call_count == number_of_files + for idx, file in enumerate(service_data[CONF_FILENAMES]): + assert mock_file.call_args_list[idx][0][0] == file + + +@pytest.mark.parametrize( + ( + "service_data", + "error", + "number_of_files", + "exists_side_effect", + "is_allowed_side_effect", + ), + [ + ( + {"prompt": "Picture of a dog", "filenames": ["/a/b/c.jpg"]}, + "`/a/b/c.jpg` does not exist", + 0, + [False], + [True], + ), + ( + { + "prompt": "Picture of a dog", + "filenames": ["/a/b/c.jpg", "d/e/f.png"], + }, + "Cannot read `d/e/f.png`, no access to path; `allowlist_external_dirs` may need to be adjusted in `configuration.yaml`", + 1, + [True, True], + [True, False], + ), + ( + {"prompt": "Not a picture of a dog", "filenames": ["/a/b/c.pdf"]}, + "Only images are supported by the OpenAI API,`/a/b/c.pdf` is not an image file", + 1, + [True], + [True], + ), + ], +) +async def test_generate_content_service_invalid( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_init_component, + service_data, + error, + number_of_files, + exists_side_effect, + is_allowed_side_effect, +) -> None: + """Test generate content service.""" + service_data["config_entry"] = mock_config_entry.entry_id + + with ( + patch( + "openai.resources.responses.AsyncResponses.create", + new_callable=AsyncMock, + ) as mock_create, + patch( + "base64.b64encode", side_effect=[b"BASE64IMAGE1", b"BASE64IMAGE2"] + ) as mock_b64encode, + patch("builtins.open", mock_open(read_data="ABC")), + patch("pathlib.Path.exists", side_effect=exists_side_effect), + patch.object( + hass.config, "is_allowed_path", side_effect=is_allowed_side_effect + ), + ): + with pytest.raises(HomeAssistantError, match=error): + await hass.services.async_call( + "openai_conversation", + "generate_content", + service_data, + blocking=True, + return_response=True, + ) + assert len(mock_create.mock_calls) == 0 + assert mock_b64encode.call_count == number_of_files + + +@pytest.mark.usefixtures("mock_init_component") +async def test_generate_content_service_error( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Test generate content service handles errors.""" + with ( + patch( + "openai.resources.responses.AsyncResponses.create", + side_effect=RateLimitError( + response=httpx.Response( + status_code=417, request=httpx.Request(method="GET", url="") + ), + body=None, + message="Reason", + ), + ), + pytest.raises(HomeAssistantError, match="Error generating content: Reason"), + ): + await hass.services.async_call( + "openai_conversation", + "generate_content", + { + "config_entry": mock_config_entry.entry_id, + "prompt": "Image of an epic fail", + }, + blocking=True, + return_response=True, + ) diff --git a/tests/components/pglab/test_cover.py b/tests/components/pglab/test_cover.py new file mode 100644 index 00000000000..ea4c7a7213e --- /dev/null +++ b/tests/components/pglab/test_cover.py @@ -0,0 +1,210 @@ +"""The tests for the PG LAB Electronics cover.""" + 
+import json
+
+from homeassistant.components import cover
+from homeassistant.components.cover import (
+    DOMAIN as COVER_DOMAIN,
+    SERVICE_CLOSE_COVER,
+    SERVICE_OPEN_COVER,
+    SERVICE_STOP_COVER,
+)
+from homeassistant.const import (
+    ATTR_ASSUMED_STATE,
+    STATE_CLOSED,
+    STATE_CLOSING,
+    STATE_OPEN,
+    STATE_OPENING,
+    STATE_UNKNOWN,
+)
+from homeassistant.core import HomeAssistant
+
+from tests.common import async_fire_mqtt_message
+from tests.typing import MqttMockHAClient
+
+COVER_FEATURES = (
+    cover.CoverEntityFeature.OPEN
+    | cover.CoverEntityFeature.CLOSE
+    | cover.CoverEntityFeature.STOP
+)
+
+
+async def call_service(hass: HomeAssistant, entity_id, service, **kwargs):
+    """Call a service."""
+    await hass.services.async_call(
+        COVER_DOMAIN,
+        service,
+        {"entity_id": entity_id, **kwargs},
+        blocking=True,
+    )
+
+
+async def test_cover_features(
+    hass: HomeAssistant, mqtt_mock: MqttMockHAClient, setup_pglab
+) -> None:
+    """Test cover features."""
+    topic = "pglab/discovery/E-Board-DD53AC85/config"
+    payload = {
+        "ip": "192.168.1.16",
+        "mac": "80:34:28:1B:18:5A",
+        "name": "test",
+        "hw": "1.0.7",
+        "fw": "1.0.0",
+        "type": "E-Board",
+        "id": "E-Board-DD53AC85",
+        "manufacturer": "PG LAB Electronics",
+        "params": {"shutters": 4, "boards": "10000000"},
+    }
+
+    async_fire_mqtt_message(
+        hass,
+        topic,
+        json.dumps(payload),
+    )
+    await hass.async_block_till_done()
+
+    assert len(hass.states.async_all("cover")) == 4
+
+    for i in range(4):
+        cover = hass.states.get(f"cover.test_shutter_{i}")
+        assert cover
+        assert cover.attributes["supported_features"] == COVER_FEATURES
+
+
+async def test_cover_availability(
+    hass: HomeAssistant, mqtt_mock: MqttMockHAClient, setup_pglab
+) -> None:
+    """Check if covers are properly created."""
+    topic = "pglab/discovery/E-Board-DD53AC85/config"
+    payload = {
+        "ip": "192.168.1.16",
+        "mac": "80:34:28:1B:18:5A",
+        "name": "test",
+        "hw": "1.0.7",
+        "fw": "1.0.0",
+        "type": "E-Board",
+        "id": "E-Board-DD53AC85",
+        "manufacturer": "PG LAB Electronics",
+        "params": {"shutters": 6, "boards": "11000000"},
+    }
+
+    async_fire_mqtt_message(
+        hass,
+        topic,
+        json.dumps(payload),
+    )
+    await hass.async_block_till_done()
+
+    # We are creating 6 covers using two E-RELAY devices connected to E-BOARD.
+    # Now we are going to check if all covers are created and their state is unknown.
+    for i in range(5):
+        cover = hass.states.get(f"cover.test_shutter_{i}")
+        assert cover.state == STATE_UNKNOWN
+        assert not cover.attributes.get(ATTR_ASSUMED_STATE)
+
+    # The cover with id 7 should not be created.
+    cover = hass.states.get("cover.test_shutter_7")
+    assert not cover
+
+
+async def test_cover_change_state_via_mqtt(
+    hass: HomeAssistant, mqtt_mock: MqttMockHAClient, setup_pglab
+) -> None:
+    """Test state update via MQTT."""
+    topic = "pglab/discovery/E-Board-DD53AC85/config"
+    payload = {
+        "ip": "192.168.1.16",
+        "mac": "80:34:28:1B:18:5A",
+        "name": "test",
+        "hw": "1.0.7",
+        "fw": "1.0.0",
+        "type": "E-Board",
+        "id": "E-Board-DD53AC85",
+        "manufacturer": "PG LAB Electronics",
+        "params": {"shutters": 2, "boards": "10000000"},
+    }
+
+    async_fire_mqtt_message(
+        hass,
+        topic,
+        json.dumps(payload),
+    )
+    await hass.async_block_till_done()
+
+    # Check initial state is unknown
+    cover = hass.states.get("cover.test_shutter_0")
+    assert cover.state == STATE_UNKNOWN
+    assert not cover.attributes.get(ATTR_ASSUMED_STATE)
+
+    # Simulate the device responds sending mqtt messages and check if the cover state
+    # change appropriately.
+ + async_fire_mqtt_message(hass, "pglab/test/shutter/0/state", "OPEN") + await hass.async_block_till_done() + cover = hass.states.get("cover.test_shutter_0") + assert not cover.attributes.get(ATTR_ASSUMED_STATE) + assert cover.state == STATE_OPEN + + async_fire_mqtt_message(hass, "pglab/test/shutter/0/state", "OPENING") + await hass.async_block_till_done() + cover = hass.states.get("cover.test_shutter_0") + assert cover.state == STATE_OPENING + + async_fire_mqtt_message(hass, "pglab/test/shutter/0/state", "CLOSING") + await hass.async_block_till_done() + cover = hass.states.get("cover.test_shutter_0") + assert cover.state == STATE_CLOSING + + async_fire_mqtt_message(hass, "pglab/test/shutter/0/state", "CLOSED") + await hass.async_block_till_done() + cover = hass.states.get("cover.test_shutter_0") + assert cover.state == STATE_CLOSED + + +async def test_cover_mqtt_state_by_calling_service( + hass: HomeAssistant, mqtt_mock: MqttMockHAClient, setup_pglab +) -> None: + """Calling service to OPEN/CLOSE cover and check mqtt state.""" + topic = "pglab/discovery/E-Board-DD53AC85/config" + payload = { + "ip": "192.168.1.16", + "mac": "80:34:28:1B:18:5A", + "name": "test", + "hw": "1.0.7", + "fw": "1.0.0", + "type": "E-Board", + "id": "E-Board-DD53AC85", + "manufacturer": "PG LAB Electronics", + "params": {"shutters": 2, "boards": "10000000"}, + } + + async_fire_mqtt_message( + hass, + topic, + json.dumps(payload), + ) + await hass.async_block_till_done() + + cover = hass.states.get("cover.test_shutter_0") + assert cover.state == STATE_UNKNOWN + assert not cover.attributes.get(ATTR_ASSUMED_STATE) + + # Call HA covers services and verify that the MQTT messages are sent correctly + + await call_service(hass, "cover.test_shutter_0", SERVICE_OPEN_COVER) + mqtt_mock.async_publish.assert_called_once_with( + "pglab/test/shutter/0/set", "OPEN", 0, False + ) + mqtt_mock.async_publish.reset_mock() + + await call_service(hass, "cover.test_shutter_0", SERVICE_STOP_COVER) + mqtt_mock.async_publish.assert_called_once_with( + "pglab/test/shutter/0/set", "STOP", 0, False + ) + mqtt_mock.async_publish.reset_mock() + + await call_service(hass, "cover.test_shutter_0", SERVICE_CLOSE_COVER) + mqtt_mock.async_publish.assert_called_once_with( + "pglab/test/shutter/0/set", "CLOSE", 0, False + ) + mqtt_mock.async_publish.reset_mock() diff --git a/tests/components/ping/snapshots/test_sensor.ambr b/tests/components/ping/snapshots/test_sensor.ambr index bb811af6a34..6b86c327863 100644 --- a/tests/components/ping/snapshots/test_sensor.ambr +++ b/tests/components/ping/snapshots/test_sensor.ambr @@ -26,7 +26,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Round Trip Time Average', + 'original_name': 'Round-trip time average', 'platform': 'ping', 'previous_unique_id': None, 'supported_features': 0, @@ -38,7 +38,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': '10.10.10.10 Round Trip Time Average', + 'friendly_name': '10.10.10.10 Round-trip time average', 'state_class': , 'unit_of_measurement': , }), @@ -77,7 +77,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Round Trip Time Maximum', + 'original_name': 'Round-trip time maximum', 'platform': 'ping', 'previous_unique_id': None, 'supported_features': 0, @@ -89,7 +89,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': '10.10.10.10 Round Trip Time Maximum', + 'friendly_name': '10.10.10.10 Round-trip time maximum', 'state_class': , 
'unit_of_measurement': , }), @@ -134,7 +134,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Round Trip Time Minimum', + 'original_name': 'Round-trip time minimum', 'platform': 'ping', 'previous_unique_id': None, 'supported_features': 0, @@ -146,7 +146,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': '10.10.10.10 Round Trip Time Minimum', + 'friendly_name': '10.10.10.10 Round-trip time minimum', 'state_class': , 'unit_of_measurement': , }), diff --git a/tests/components/pterodactyl/__init__.py b/tests/components/pterodactyl/__init__.py new file mode 100644 index 00000000000..a5b28d67ae3 --- /dev/null +++ b/tests/components/pterodactyl/__init__.py @@ -0,0 +1 @@ +"""Tests for the Pterodactyl integration.""" diff --git a/tests/components/pterodactyl/conftest.py b/tests/components/pterodactyl/conftest.py new file mode 100644 index 00000000000..62326e79207 --- /dev/null +++ b/tests/components/pterodactyl/conftest.py @@ -0,0 +1,155 @@ +"""Common fixtures for the Pterodactyl tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +from pydactyl.responses import PaginatedResponse +import pytest + +from homeassistant.components.pterodactyl.const import DOMAIN +from homeassistant.const import CONF_API_KEY, CONF_URL + +from tests.common import MockConfigEntry + +TEST_URL = "https://192.168.0.1:8080/" +TEST_API_KEY = "TestClientApiKey" +TEST_USER_INPUT = { + CONF_URL: TEST_URL, + CONF_API_KEY: TEST_API_KEY, +} +TEST_SERVER_LIST_DATA = { + "meta": {"pagination": {"total": 2, "count": 2, "per_page": 50, "current_page": 1}}, + "data": [ + { + "object": "server", + "attributes": { + "server_owner": True, + "identifier": "1", + "internal_id": 1, + "uuid": "1-1-1-1-1", + "name": "Test Server 1", + "node": "default_node", + "description": "Description of Test Server 1", + "limits": { + "memory": 2048, + "swap": 1024, + "disk": 10240, + "io": 500, + "cpu": 100, + "threads": None, + "oom_disabled": True, + }, + "invocation": "java -jar test_server1.jar", + "docker_image": "test_docker_image_1", + "egg_features": ["java_version"], + }, + }, + { + "object": "server", + "attributes": { + "server_owner": True, + "identifier": "2", + "internal_id": 2, + "uuid": "2-2-2-2-2", + "name": "Test Server 2", + "node": "default_node", + "description": "Description of Test Server 2", + "limits": { + "memory": 2048, + "swap": 1024, + "disk": 10240, + "io": 500, + "cpu": 100, + "threads": None, + "oom_disabled": True, + }, + "invocation": "java -jar test_server_2.jar", + "docker_image": "test_docker_image2", + "egg_features": ["java_version"], + }, + }, + ], +} +TEST_SERVER = { + "server_owner": True, + "identifier": "1", + "internal_id": 1, + "uuid": "1-1-1-1-1", + "name": "Test Server 1", + "node": "default_node", + "is_node_under_maintenance": False, + "sftp_details": {"ip": "192.168.0.1", "port": 2022}, + "description": "", + "limits": { + "memory": 2048, + "swap": 1024, + "disk": 10240, + "io": 500, + "cpu": 100, + "threads": None, + "oom_disabled": True, + }, + "invocation": "java -jar test.jar", + "docker_image": "test_docker_image", + "egg_features": ["eula", "java_version", "pid_limit"], + "feature_limits": {"databases": 0, "allocations": 0, "backups": 3}, + "status": None, + "is_suspended": False, + "is_installing": False, + "is_transferring": False, + "relationships": {"allocations": {...}, "variables": {...}}, +} +TEST_SERVER_UTILIZATION = { + "current_state": "running", + "is_suspended": False, + 
"resources": { + "memory_bytes": 1111, + "cpu_absolute": 22, + "disk_bytes": 3333, + "network_rx_bytes": 44, + "network_tx_bytes": 55, + "uptime": 6666, + }, +} + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.pterodactyl.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Create Pterodactyl mock config entry.""" + return MockConfigEntry( + domain=DOMAIN, + unique_id=None, + entry_id="01234567890123456789012345678901", + title=TEST_URL, + data={ + CONF_URL: TEST_URL, + CONF_API_KEY: TEST_API_KEY, + }, + version=1, + ) + + +@pytest.fixture +def mock_pterodactyl(): + """Mock the Pterodactyl API.""" + with patch( + "homeassistant.components.pterodactyl.api.PterodactylClient", autospec=True + ) as mock: + mock.return_value.client.servers.list_servers.return_value = PaginatedResponse( + mock.return_value, "client", TEST_SERVER_LIST_DATA + ) + mock.return_value.client.servers.get_server.return_value = TEST_SERVER + mock.return_value.client.servers.get_server_utilization.return_value = ( + TEST_SERVER_UTILIZATION + ) + + yield mock.return_value diff --git a/tests/components/pterodactyl/test_config_flow.py b/tests/components/pterodactyl/test_config_flow.py new file mode 100644 index 00000000000..14bb2d2f69f --- /dev/null +++ b/tests/components/pterodactyl/test_config_flow.py @@ -0,0 +1,129 @@ +"""Test the Pterodactyl config flow.""" + +from pydactyl import PterodactylClient +from pydactyl.exceptions import ClientConfigError, PterodactylApiError +import pytest + +from homeassistant.components.pterodactyl.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .conftest import TEST_URL, TEST_USER_INPUT + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("mock_pterodactyl", "mock_setup_entry") +async def test_full_flow(hass: HomeAssistant) -> None: + """Test full flow without errors.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + flow_id=result["flow_id"], + user_input=TEST_USER_INPUT, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == TEST_URL + assert result["data"] == TEST_USER_INPUT + + +@pytest.mark.usefixtures("mock_setup_entry") +@pytest.mark.parametrize( + "exception_type", + [ + ClientConfigError, + PterodactylApiError, + ], +) +async def test_recovery_after_api_error( + hass: HomeAssistant, + exception_type, + mock_pterodactyl: PterodactylClient, +) -> None: + """Test recovery after an API error.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + mock_pterodactyl.client.servers.list_servers.side_effect = exception_type + + result = await hass.config_entries.flow.async_configure( + flow_id=result["flow_id"], + user_input=TEST_USER_INPUT, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + + mock_pterodactyl.reset_mock(side_effect=True) + + result 
= await hass.config_entries.flow.async_configure( + flow_id=result["flow_id"], user_input=TEST_USER_INPUT + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == TEST_URL + assert result["data"] == TEST_USER_INPUT + + +@pytest.mark.usefixtures("mock_setup_entry") +async def test_recovery_after_unknown_error( + hass: HomeAssistant, + mock_pterodactyl: PterodactylClient, +) -> None: + """Test recovery after an API error.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + mock_pterodactyl.client.servers.list_servers.side_effect = Exception + + result = await hass.config_entries.flow.async_configure( + flow_id=result["flow_id"], + user_input=TEST_USER_INPUT, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "unknown"} + + mock_pterodactyl.reset_mock(side_effect=True) + + result = await hass.config_entries.flow.async_configure( + flow_id=result["flow_id"], user_input=TEST_USER_INPUT + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == TEST_URL + assert result["data"] == TEST_USER_INPUT + + +@pytest.mark.usefixtures("mock_setup_entry") +async def test_service_already_configured( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_pterodactyl: PterodactylClient, +) -> None: + """Test config flow abort if the Pterodactyl server is already configured.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER}, data=TEST_USER_INPUT + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/purpleair/conftest.py b/tests/components/purpleair/conftest.py index 1809b16bd75..a9a51c22b7c 100644 --- a/tests/components/purpleair/conftest.py +++ b/tests/components/purpleair/conftest.py @@ -8,7 +8,7 @@ from aiopurpleair.endpoints.sensors import NearbySensorResult from aiopurpleair.models.sensors import GetSensorsResponse import pytest -from homeassistant.components.purpleair import DOMAIN +from homeassistant.components.purpleair.const import DOMAIN from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, load_fixture @@ -20,7 +20,7 @@ TEST_SENSOR_INDEX2 = 567890 @pytest.fixture(name="api") def api_fixture(get_sensors_response: GetSensorsResponse) -> Mock: - """Define a fixture to return a mocked aiopurple API object.""" + """Define a fixture to return a mocked aiopurpleair API object.""" return Mock( async_check_api_key=AsyncMock(), get_map_url=Mock(return_value="http://example.com"), diff --git a/tests/components/purpleair/test_config_flow.py b/tests/components/purpleair/test_config_flow.py index 998cb2b7878..5ee15de4e6b 100644 --- a/tests/components/purpleair/test_config_flow.py +++ b/tests/components/purpleair/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import AsyncMock, patch from aiopurpleair.errors import InvalidApiKeyError, PurpleAirError import pytest -from homeassistant.components.purpleair import DOMAIN +from homeassistant.components.purpleair.const import DOMAIN from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -288,6 +288,7 @@ async 
def test_options_remove_sensor( device_entry = device_registry.async_get_device( identifiers={(DOMAIN, str(TEST_SENSOR_INDEX1))} ) + assert device_entry is not None result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={"sensor_device_id": device_entry.id}, diff --git a/tests/components/qbus/conftest.py b/tests/components/qbus/conftest.py index 8268d091bda..f1fd96c321b 100644 --- a/tests/components/qbus/conftest.py +++ b/tests/components/qbus/conftest.py @@ -1,5 +1,7 @@ """Test fixtures for qbus.""" +import json + import pytest from homeassistant.components.qbus.const import CONF_SERIAL_NUMBER, DOMAIN @@ -7,9 +9,13 @@ from homeassistant.const import CONF_ID from homeassistant.core import HomeAssistant from homeassistant.util.json import JsonObjectType -from .const import FIXTURE_PAYLOAD_CONFIG +from .const import FIXTURE_PAYLOAD_CONFIG, TOPIC_CONFIG -from tests.common import MockConfigEntry, load_json_object_fixture +from tests.common import ( + MockConfigEntry, + async_fire_mqtt_message, + load_json_object_fixture, +) @pytest.fixture @@ -31,3 +37,18 @@ def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: def payload_config() -> JsonObjectType: """Return the config topic payload.""" return load_json_object_fixture(FIXTURE_PAYLOAD_CONFIG, DOMAIN) + + +@pytest.fixture +async def setup_integration( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + payload_config: JsonObjectType, +) -> None: + """Set up the integration.""" + + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + async_fire_mqtt_message(hass, TOPIC_CONFIG, json.dumps(payload_config)) + await hass.async_block_till_done() diff --git a/tests/components/qbus/fixtures/payload_config.json b/tests/components/qbus/fixtures/payload_config.json index e2c7f463e4e..fc204c975ad 100644 --- a/tests/components/qbus/fixtures/payload_config.json +++ b/tests/components/qbus/fixtures/payload_config.json @@ -46,7 +46,7 @@ { "id": "UL15", "location": "Media room", - "locationId": 0, + "locationId": 1, "name": "MEDIA ROOM", "originalName": "MEDIA ROOM", "refId": "000001/28", @@ -65,6 +65,40 @@ "write": true } } + }, + { + "id": "UL20", + "location": "Living", + "locationId": 0, + "name": "LIVING TH", + "originalName": "LIVING TH", + "refId": "000001/120", + "type": "thermo", + "actions": {}, + "properties": { + "currRegime": { + "enumValues": ["MANUEEL", "VORST", "ECONOMY", "COMFORT", "NACHT"], + "read": true, + "type": "enumString", + "write": true + }, + "currTemp": { + "max": 35, + "min": 0, + "read": true, + "step": 0.5, + "type": "number", + "write": false + }, + "setTemp": { + "max": 35, + "min": 0, + "read": true, + "step": 0.5, + "type": "number", + "write": true + } + } } ] } diff --git a/tests/components/qbus/test_climate.py b/tests/components/qbus/test_climate.py new file mode 100644 index 00000000000..d521e310984 --- /dev/null +++ b/tests/components/qbus/test_climate.py @@ -0,0 +1,228 @@ +"""Test Qbus light entities.""" + +from datetime import timedelta +from unittest.mock import MagicMock, call + +import pytest + +from homeassistant.components.climate import ( + ATTR_CURRENT_TEMPERATURE, + ATTR_HVAC_ACTION, + ATTR_PRESET_MODE, + DOMAIN as CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + SERVICE_SET_TEMPERATURE, + ClimateEntity, + HVACAction, + HVACMode, +) +from homeassistant.components.qbus.climate import STATE_REQUEST_DELAY +from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE +from homeassistant.core import 
HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers.entity_platform import EntityPlatform +from homeassistant.util import dt as dt_util + +from tests.common import async_fire_mqtt_message, async_fire_time_changed +from tests.typing import MqttMockHAClient + +_CURRENT_TEMPERATURE = 21.5 +_SET_TEMPERATURE = 20.5 +_REGIME = "COMFORT" + +_PAYLOAD_CLIMATE_STATE_TEMP = ( + f'{{"id":"UL20","properties":{{"setTemp":{_SET_TEMPERATURE}}},"type":"event"}}' +) +_PAYLOAD_CLIMATE_STATE_TEMP_FULL = f'{{"id":"UL20","properties":{{"currRegime":"MANUEEL","currTemp":{_CURRENT_TEMPERATURE},"setTemp":{_SET_TEMPERATURE}}},"type":"state"}}' + +_PAYLOAD_CLIMATE_STATE_PRESET = ( + f'{{"id":"UL20","properties":{{"currRegime":"{_REGIME}"}},"type":"event"}}' +) +_PAYLOAD_CLIMATE_STATE_PRESET_FULL = f'{{"id":"UL20","properties":{{"currRegime":"{_REGIME}","currTemp":{_CURRENT_TEMPERATURE},"setTemp":22.0}},"type":"state"}}' + +_PAYLOAD_CLIMATE_SET_TEMP = f'{{"id": "UL20", "type": "state", "properties": {{"setTemp": {_SET_TEMPERATURE}}}}}' +_PAYLOAD_CLIMATE_SET_PRESET = ( + '{"id": "UL20", "type": "state", "properties": {"currRegime": "COMFORT"}}' +) + +_TOPIC_CLIMATE_STATE = "cloudapp/QBUSMQTTGW/UL1/UL20/state" +_TOPIC_CLIMATE_SET_STATE = "cloudapp/QBUSMQTTGW/UL1/UL20/setState" +_TOPIC_GET_STATE = "cloudapp/QBUSMQTTGW/getState" + +_CLIMATE_ENTITY_ID = "climate.living_th" + + +async def test_climate( + hass: HomeAssistant, + mqtt_mock: MqttMockHAClient, + setup_integration: None, +) -> None: + """Test climate temperature & preset.""" + + # Set temperature + mqtt_mock.reset_mock() + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + { + ATTR_ENTITY_ID: _CLIMATE_ENTITY_ID, + ATTR_TEMPERATURE: _SET_TEMPERATURE, + }, + blocking=True, + ) + + mqtt_mock.async_publish.assert_called_once_with( + _TOPIC_CLIMATE_SET_STATE, _PAYLOAD_CLIMATE_SET_TEMP, 0, False + ) + + # Simulate a partial state response + async_fire_mqtt_message(hass, _TOPIC_CLIMATE_STATE, _PAYLOAD_CLIMATE_STATE_TEMP) + await hass.async_block_till_done() + + # Check state + entity = hass.states.get(_CLIMATE_ENTITY_ID) + assert entity + assert entity.attributes[ATTR_TEMPERATURE] == _SET_TEMPERATURE + assert entity.attributes[ATTR_CURRENT_TEMPERATURE] is None + assert entity.attributes[ATTR_PRESET_MODE] == "MANUEEL" + assert entity.attributes[ATTR_HVAC_ACTION] == HVACAction.IDLE + assert entity.state == HVACMode.HEAT + + # After a delay, a full state request should've been sent + _wait_and_assert_state_request(hass, mqtt_mock) + + # Simulate a full state response + async_fire_mqtt_message( + hass, _TOPIC_CLIMATE_STATE, _PAYLOAD_CLIMATE_STATE_TEMP_FULL + ) + await hass.async_block_till_done() + + # Check state after full state response + entity = hass.states.get(_CLIMATE_ENTITY_ID) + assert entity + assert entity.attributes[ATTR_TEMPERATURE] == _SET_TEMPERATURE + assert entity.attributes[ATTR_CURRENT_TEMPERATURE] == _CURRENT_TEMPERATURE + assert entity.attributes[ATTR_PRESET_MODE] == "MANUEEL" + assert entity.attributes[ATTR_HVAC_ACTION] == HVACAction.IDLE + assert entity.state == HVACMode.HEAT + + # Set preset + mqtt_mock.reset_mock() + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + { + ATTR_ENTITY_ID: _CLIMATE_ENTITY_ID, + ATTR_PRESET_MODE: _REGIME, + }, + blocking=True, + ) + + mqtt_mock.async_publish.assert_called_once_with( + _TOPIC_CLIMATE_SET_STATE, _PAYLOAD_CLIMATE_SET_PRESET, 0, False + ) + + # Simulate a partial state response + 
async_fire_mqtt_message(hass, _TOPIC_CLIMATE_STATE, _PAYLOAD_CLIMATE_STATE_PRESET) + await hass.async_block_till_done() + + # Check state + entity = hass.states.get(_CLIMATE_ENTITY_ID) + assert entity + assert entity.attributes[ATTR_TEMPERATURE] == _SET_TEMPERATURE + assert entity.attributes[ATTR_CURRENT_TEMPERATURE] == _CURRENT_TEMPERATURE + assert entity.attributes[ATTR_PRESET_MODE] == _REGIME + assert entity.attributes[ATTR_HVAC_ACTION] == HVACAction.IDLE + assert entity.state == HVACMode.HEAT + + # After a delay, a full state request should've been sent + _wait_and_assert_state_request(hass, mqtt_mock) + + # Simulate a full state response + async_fire_mqtt_message( + hass, _TOPIC_CLIMATE_STATE, _PAYLOAD_CLIMATE_STATE_PRESET_FULL + ) + await hass.async_block_till_done() + + # Check state after full state response + entity = hass.states.get(_CLIMATE_ENTITY_ID) + assert entity + assert entity.attributes[ATTR_TEMPERATURE] == 22.0 + assert entity.attributes[ATTR_CURRENT_TEMPERATURE] == _CURRENT_TEMPERATURE + assert entity.attributes[ATTR_PRESET_MODE] == _REGIME + assert entity.attributes[ATTR_HVAC_ACTION] == HVACAction.HEATING + assert entity.state == HVACMode.HEAT + + +async def test_climate_when_invalid_state_received( + hass: HomeAssistant, + mqtt_mock: MqttMockHAClient, + setup_integration: None, +) -> None: + """Test climate when no valid state is received.""" + + platform: EntityPlatform = hass.data["entity_components"][CLIMATE_DOMAIN] + entity: ClimateEntity = next( + ( + entity + for entity in platform.entities + if entity.entity_id == _CLIMATE_ENTITY_ID + ), + None, + ) + + assert entity + entity.async_schedule_update_ha_state = MagicMock() + + # Simulate state response + async_fire_mqtt_message(hass, _TOPIC_CLIMATE_STATE, "") + await hass.async_block_till_done() + + entity.async_schedule_update_ha_state.assert_not_called() + + +async def test_climate_with_fast_subsequent_changes( + hass: HomeAssistant, + mqtt_mock: MqttMockHAClient, + setup_integration: None, +) -> None: + """Test climate with fast subsequent changes.""" + + # Simulate two subsequent partial state responses + async_fire_mqtt_message(hass, _TOPIC_CLIMATE_STATE, _PAYLOAD_CLIMATE_STATE_TEMP) + await hass.async_block_till_done() + async_fire_mqtt_message(hass, _TOPIC_CLIMATE_STATE, _PAYLOAD_CLIMATE_STATE_TEMP) + await hass.async_block_till_done() + + # State request should be requested only once + _wait_and_assert_state_request(hass, mqtt_mock) + + +async def test_climate_with_unknown_preset( + hass: HomeAssistant, + mqtt_mock: MqttMockHAClient, + setup_integration: None, +) -> None: + """Test climate with passing an unknown preset value.""" + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + { + ATTR_ENTITY_ID: _CLIMATE_ENTITY_ID, + ATTR_PRESET_MODE: "What is cooler than being cool?", + }, + blocking=True, + ) + + +def _wait_and_assert_state_request( + hass: HomeAssistant, mqtt_mock: MqttMockHAClient +) -> None: + mqtt_mock.reset_mock() + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(STATE_REQUEST_DELAY)) + mqtt_mock.async_publish.assert_has_calls( + [call(_TOPIC_GET_STATE, '["UL20"]', 0, False)], + any_order=True, + ) diff --git a/tests/components/qbus/test_light.py b/tests/components/qbus/test_light.py index c64219f1269..2db2c622289 100644 --- a/tests/components/qbus/test_light.py +++ b/tests/components/qbus/test_light.py @@ -1,7 +1,5 @@ """Test Qbus light entities.""" -import json - from homeassistant.components.light import 
( ATTR_BRIGHTNESS, DOMAIN as LIGHT_DOMAIN, @@ -10,11 +8,8 @@ from homeassistant.components.light import ( ) from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant -from homeassistant.util.json import JsonObjectType -from .const import TOPIC_CONFIG - -from tests.common import MockConfigEntry, async_fire_mqtt_message +from tests.common import async_fire_mqtt_message from tests.typing import MqttMockHAClient # 186 = 73% (rounded) @@ -44,17 +39,10 @@ _LIGHT_ENTITY_ID = "light.media_room" async def test_light( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - mock_config_entry: MockConfigEntry, - payload_config: JsonObjectType, + setup_integration: None, ) -> None: """Test turning on and off.""" - assert await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - async_fire_mqtt_message(hass, TOPIC_CONFIG, json.dumps(payload_config)) - await hass.async_block_till_done() - # Switch ON mqtt_mock.reset_mock() await hass.services.async_call( diff --git a/tests/components/qbus/test_switch.py b/tests/components/qbus/test_switch.py index 83bb667e4eb..ddb63e933da 100644 --- a/tests/components/qbus/test_switch.py +++ b/tests/components/qbus/test_switch.py @@ -1,7 +1,5 @@ """Test Qbus switch entities.""" -import json - from homeassistant.components.switch import ( DOMAIN as SWITCH_DOMAIN, SERVICE_TURN_OFF, @@ -9,11 +7,8 @@ from homeassistant.components.switch import ( ) from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant -from homeassistant.util.json import JsonObjectType -from .const import TOPIC_CONFIG - -from tests.common import MockConfigEntry, async_fire_mqtt_message +from tests.common import async_fire_mqtt_message from tests.typing import MqttMockHAClient _PAYLOAD_SWITCH_STATE_ON = '{"id":"UL10","properties":{"value":true},"type":"state"}' @@ -34,17 +29,10 @@ _SWITCH_ENTITY_ID = "switch.living" async def test_switch_turn_on_off( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - mock_config_entry: MockConfigEntry, - payload_config: JsonObjectType, + setup_integration: None, ) -> None: """Test turning on and off.""" - assert await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - async_fire_mqtt_message(hass, TOPIC_CONFIG, json.dumps(payload_config)) - await hass.async_block_till_done() - # Switch ON mqtt_mock.reset_mock() await hass.services.async_call( diff --git a/tests/components/recorder/auto_repairs/statistics/test_schema.py b/tests/components/recorder/auto_repairs/statistics/test_schema.py index 352a2345052..99d6705e4a4 100644 --- a/tests/components/recorder/auto_repairs/statistics/test_schema.py +++ b/tests/components/recorder/auto_repairs/statistics/test_schema.py @@ -87,6 +87,7 @@ async def test_validate_db_schema_fix_float_issue( "created_ts DOUBLE PRECISION", "start_ts DOUBLE PRECISION", "mean DOUBLE PRECISION", + "mean_weight DOUBLE PRECISION", "min DOUBLE PRECISION", "max DOUBLE PRECISION", "last_reset_ts DOUBLE PRECISION", diff --git a/tests/components/recorder/common.py b/tests/components/recorder/common.py index 28eb097f576..d381c225275 100644 --- a/tests/components/recorder/common.py +++ b/tests/components/recorder/common.py @@ -35,7 +35,8 @@ from homeassistant.components.recorder.db_schema import ( StatesMeta, ) from homeassistant.components.recorder.tasks import RecorderTask, StatisticsTask -from homeassistant.const import UnitOfTemperature +from 
homeassistant.components.sensor import SensorDeviceClass, SensorStateClass +from homeassistant.const import DEGREE, UnitOfTemperature from homeassistant.core import Event, HomeAssistant, State from homeassistant.helpers import recorder as recorder_helper from homeassistant.util import dt as dt_util @@ -290,6 +291,7 @@ def record_states( sns2 = "sensor.test2" sns3 = "sensor.test3" sns4 = "sensor.test4" + sns5 = "sensor.wind_direction" sns1_attr = { "device_class": "temperature", "state_class": "measurement", @@ -302,6 +304,11 @@ def record_states( } sns3_attr = {"device_class": "temperature"} sns4_attr = {} + sns5_attr = { + "device_class": SensorDeviceClass.WIND_DIRECTION, + "state_class": SensorStateClass.MEASUREMENT_ANGLE, + "unit_of_measurement": DEGREE, + } def set_state(entity_id, state, **kwargs): """Set the state.""" @@ -315,7 +322,7 @@ def record_states( three = two + timedelta(seconds=30 * 5) four = three + timedelta(seconds=14 * 5) - states = {mp: [], sns1: [], sns2: [], sns3: [], sns4: []} + states = {mp: [], sns1: [], sns2: [], sns3: [], sns4: [], sns5: []} with freeze_time(one) as freezer: states[mp].append( set_state(mp, "idle", attributes={"media_title": str(sentinel.mt1)}) @@ -324,6 +331,7 @@ def record_states( states[sns2].append(set_state(sns2, "10", attributes=sns2_attr)) states[sns3].append(set_state(sns3, "10", attributes=sns3_attr)) states[sns4].append(set_state(sns4, "10", attributes=sns4_attr)) + states[sns5].append(set_state(sns5, "10", attributes=sns5_attr)) freezer.move_to(one + timedelta(microseconds=1)) states[mp].append( @@ -335,12 +343,14 @@ def record_states( states[sns2].append(set_state(sns2, "15", attributes=sns2_attr)) states[sns3].append(set_state(sns3, "15", attributes=sns3_attr)) states[sns4].append(set_state(sns4, "15", attributes=sns4_attr)) + states[sns5].append(set_state(sns5, "350", attributes=sns5_attr)) freezer.move_to(three) states[sns1].append(set_state(sns1, "20", attributes=sns1_attr)) states[sns2].append(set_state(sns2, "20", attributes=sns2_attr)) states[sns3].append(set_state(sns3, "20", attributes=sns3_attr)) states[sns4].append(set_state(sns4, "20", attributes=sns4_attr)) + states[sns5].append(set_state(sns5, "5", attributes=sns5_attr)) return zero, four, states diff --git a/tests/components/recorder/db_schema_32.py b/tests/components/recorder/db_schema_32.py index daa7fb6977c..9c19a1c7405 100644 --- a/tests/components/recorder/db_schema_32.py +++ b/tests/components/recorder/db_schema_32.py @@ -583,6 +583,8 @@ class StatisticsBase: last_reset_ts = Column(TIMESTAMP_TYPE) state = Column(DOUBLE_TYPE) sum = Column(DOUBLE_TYPE) + # *** Not originally in v32, only added for tests. 
Added in v49 + mean_weight = Column(DOUBLE_TYPE) @classmethod def from_stats(cls, metadata_id: int, stats: StatisticData) -> Self: diff --git a/tests/components/recorder/table_managers/test_statistics_meta.py b/tests/components/recorder/table_managers/test_statistics_meta.py index 66edb84c3ef..1af60b71ed5 100644 --- a/tests/components/recorder/table_managers/test_statistics_meta.py +++ b/tests/components/recorder/table_managers/test_statistics_meta.py @@ -2,10 +2,19 @@ from __future__ import annotations +import logging +import threading + import pytest from homeassistant.components import recorder +from homeassistant.components.recorder.db_schema import StatisticsMeta +from homeassistant.components.recorder.models import ( + StatisticMeanType, + StatisticMetaData, +) from homeassistant.components.recorder.util import session_scope +from homeassistant.const import DEGREE from homeassistant.core import HomeAssistant from tests.typing import RecorderInstanceGenerator @@ -55,3 +64,78 @@ async def test_unsafe_calls_to_statistics_meta_manager( session, statistic_ids=["light.kitchen"], ) + + +async def test_invalid_mean_types( + async_setup_recorder_instance: RecorderInstanceGenerator, + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test passing invalid mean types will be skipped and logged.""" + instance = await async_setup_recorder_instance( + hass, {recorder.CONF_COMMIT_INTERVAL: 0} + ) + instance.recorder_and_worker_thread_ids.add(threading.get_ident()) + + valid_metadata: dict[str, tuple[int, StatisticMetaData]] = { + "sensor.energy": ( + 1, + { + "mean_type": StatisticMeanType.NONE, + "has_mean": False, + "has_sum": True, + "name": "Total imported energy", + "source": "recorder", + "statistic_id": "sensor.energy", + "unit_of_measurement": "kWh", + }, + ), + "sensor.wind_direction": ( + 2, + { + "mean_type": StatisticMeanType.CIRCULAR, + "has_mean": False, + "has_sum": False, + "name": "Wind direction", + "source": "recorder", + "statistic_id": "sensor.wind_direction", + "unit_of_measurement": DEGREE, + }, + ), + "sensor.wind_speed": ( + 3, + { + "mean_type": StatisticMeanType.ARITHMETIC, + "has_mean": True, + "has_sum": False, + "name": "Wind speed", + "source": "recorder", + "statistic_id": "sensor.wind_speed", + "unit_of_measurement": "km/h", + }, + ), + } + manager = instance.statistics_meta_manager + with instance.get_session() as session: + for _, metadata in valid_metadata.values(): + session.add(StatisticsMeta.from_meta(metadata)) + + # Add invalid mean type + session.add( + StatisticsMeta( + statistic_id="sensor.invalid", + source="recorder", + has_sum=False, + name="Invalid", + mean_type=12345, + ) + ) + session.commit() + + # Check that the invalid mean type was skipped + assert manager.get_many(session) == valid_metadata + assert ( + "homeassistant.components.recorder.table_managers.statistics_meta", + logging.WARNING, + "Invalid mean type found for statistic_id: sensor.invalid, mean_type: 12345. 
Skipping", + ) in caplog.record_tuples diff --git a/tests/components/recorder/test_migration_from_schema_32.py b/tests/components/recorder/test_migration_from_schema_32.py index 012e227c11a..7fd73aaf735 100644 --- a/tests/components/recorder/test_migration_from_schema_32.py +++ b/tests/components/recorder/test_migration_from_schema_32.py @@ -1538,6 +1538,7 @@ async def test_stats_timestamp_conversion_is_reentrant( "last_reset_ts": one_year_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": process_timestamp(one_year_ago).replace(tzinfo=None), @@ -1553,6 +1554,7 @@ async def test_stats_timestamp_conversion_is_reentrant( "last_reset_ts": six_months_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": None, @@ -1568,6 +1570,7 @@ async def test_stats_timestamp_conversion_is_reentrant( "last_reset_ts": one_month_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": process_timestamp(one_month_ago).replace(tzinfo=None), @@ -1705,6 +1708,7 @@ async def test_stats_timestamp_with_one_by_one( "last_reset_ts": one_year_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": None, @@ -1720,6 +1724,7 @@ async def test_stats_timestamp_with_one_by_one( "last_reset_ts": six_months_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": None, @@ -1735,6 +1740,7 @@ async def test_stats_timestamp_with_one_by_one( "last_reset_ts": one_month_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": None, @@ -1758,6 +1764,7 @@ async def test_stats_timestamp_with_one_by_one( "last_reset_ts": one_year_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": None, @@ -1773,6 +1780,7 @@ async def test_stats_timestamp_with_one_by_one( "last_reset_ts": six_months_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": None, @@ -1788,6 +1796,7 @@ async def test_stats_timestamp_with_one_by_one( "last_reset_ts": one_month_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": None, @@ -1932,6 +1941,7 @@ async def test_stats_timestamp_with_one_by_one_removes_duplicates( "last_reset_ts": one_year_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": None, @@ -1947,6 +1957,7 @@ async def test_stats_timestamp_with_one_by_one_removes_duplicates( "last_reset_ts": six_months_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": None, @@ -1962,6 +1973,7 @@ async def test_stats_timestamp_with_one_by_one_removes_duplicates( "last_reset_ts": one_month_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": None, @@ -1985,6 +1997,7 @@ async def test_stats_timestamp_with_one_by_one_removes_duplicates( "last_reset_ts": six_months_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": None, diff --git a/tests/components/recorder/test_statistics.py b/tests/components/recorder/test_statistics.py index ed883c5403e..ed754723426 100644 --- a/tests/components/recorder/test_statistics.py +++ b/tests/components/recorder/test_statistics.py @@ 
-12,6 +12,7 @@ from homeassistant.components import recorder from homeassistant.components.recorder import Recorder, history, statistics from homeassistant.components.recorder.db_schema import StatisticsShortTerm from homeassistant.components.recorder.models import ( + StatisticMeanType, datetime_to_timestamp_or_none, process_timestamp, ) @@ -123,32 +124,38 @@ async def test_compile_hourly_statistics( stats = get_latest_short_term_statistics_with_session( hass, session, - {"sensor.test1"}, + {"sensor.test1", "sensor.wind_direction"}, {"last_reset", "max", "mean", "min", "state", "sum"}, ) assert stats == {} - for kwargs in ({}, {"statistic_ids": ["sensor.test1"]}): + for kwargs in ({}, {"statistic_ids": ["sensor.test1", "sensor.wind_direction"]}): stats = statistics_during_period(hass, zero, period="5minute", **kwargs) assert stats == {} - stats = get_last_short_term_statistics( - hass, - 0, - "sensor.test1", - True, - {"last_reset", "max", "mean", "min", "state", "sum"}, - ) - assert stats == {} + for sensor in ("sensor.test1", "sensor.wind_direction"): + stats = get_last_short_term_statistics( + hass, + 0, + sensor, + True, + {"last_reset", "max", "mean", "min", "state", "sum"}, + ) + assert stats == {} do_adhoc_statistics(hass, start=zero) do_adhoc_statistics(hass, start=four) await async_wait_recording_done(hass) - metadata = get_metadata(hass, statistic_ids={"sensor.test1", "sensor.test2"}) - assert metadata["sensor.test1"][1]["has_mean"] is True - assert metadata["sensor.test1"][1]["has_sum"] is False - assert metadata["sensor.test2"][1]["has_mean"] is True - assert metadata["sensor.test2"][1]["has_sum"] is False + metadata = get_metadata( + hass, statistic_ids={"sensor.test1", "sensor.test2", "sensor.wind_direction"} + ) + for sensor, mean_type in ( + ("sensor.test1", StatisticMeanType.ARITHMETIC), + ("sensor.test2", StatisticMeanType.ARITHMETIC), + ("sensor.wind_direction", StatisticMeanType.CIRCULAR), + ): + assert metadata[sensor][1]["mean_type"] is mean_type + assert metadata[sensor][1]["has_sum"] is False expected_1 = { "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), @@ -168,11 +175,39 @@ async def test_compile_hourly_statistics( expected_stats1 = [expected_1, expected_2] expected_stats2 = [expected_1, expected_2] + expected_stats_wind_direction1 = { + "start": process_timestamp(zero).timestamp(), + "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), + "mean": pytest.approx(358.6387003873801), + "min": None, + "max": None, + "last_reset": None, + } + expected_stats_wind_direction2 = { + "start": process_timestamp(four).timestamp(), + "end": process_timestamp(four + timedelta(minutes=5)).timestamp(), + "mean": pytest.approx(5), + "min": None, + "max": None, + "last_reset": None, + } + expected_stats_wind_direction = [ + expected_stats_wind_direction1, + expected_stats_wind_direction2, + ] + # Test statistics_during_period stats = statistics_during_period( - hass, zero, period="5minute", statistic_ids={"sensor.test1", "sensor.test2"} + hass, + zero, + period="5minute", + statistic_ids={"sensor.test1", "sensor.test2", "sensor.wind_direction"}, ) - assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2} + assert stats == { + "sensor.test1": expected_stats1, + "sensor.test2": expected_stats2, + "sensor.wind_direction": expected_stats_wind_direction, + } # Test statistics_during_period with a far future start and end date future = 
dt_util.as_utc(dt_util.parse_datetime("2221-11-01 00:00:00")) @@ -181,7 +216,7 @@ async def test_compile_hourly_statistics( future, end_time=future, period="5minute", - statistic_ids={"sensor.test1", "sensor.test2"}, + statistic_ids={"sensor.test1", "sensor.test2", "sensor.wind_direction"}, ) assert stats == {} @@ -191,9 +226,13 @@ async def test_compile_hourly_statistics( zero, end_time=future, period="5minute", - statistic_ids={"sensor.test1", "sensor.test2"}, + statistic_ids={"sensor.test1", "sensor.test2", "sensor.wind_direction"}, ) - assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2} + assert stats == { + "sensor.test1": expected_stats1, + "sensor.test2": expected_stats2, + "sensor.wind_direction": expected_stats_wind_direction, + } stats = statistics_during_period( hass, zero, statistic_ids={"sensor.test2"}, period="5minute" @@ -206,32 +245,39 @@ async def test_compile_hourly_statistics( assert stats == {} # Test get_last_short_term_statistics and get_latest_short_term_statistics - stats = get_last_short_term_statistics( - hass, - 0, - "sensor.test1", - True, - {"last_reset", "max", "mean", "min", "state", "sum"}, - ) - assert stats == {} + for sensor, expected in ( + ("sensor.test1", expected_2), + ("sensor.wind_direction", expected_stats_wind_direction2), + ): + stats = get_last_short_term_statistics( + hass, + 0, + sensor, + True, + {"last_reset", "max", "mean", "min", "state", "sum"}, + ) + assert stats == {} - stats = get_last_short_term_statistics( - hass, - 1, - "sensor.test1", - True, - {"last_reset", "max", "mean", "min", "state", "sum"}, - ) - assert stats == {"sensor.test1": [expected_2]} + stats = get_last_short_term_statistics( + hass, + 1, + sensor, + True, + {"last_reset", "max", "mean", "min", "state", "sum"}, + ) + assert stats == {sensor: [expected]} with session_scope(hass=hass, read_only=True) as session: stats = get_latest_short_term_statistics_with_session( hass, session, - {"sensor.test1"}, + {"sensor.test1", "sensor.wind_direction"}, {"last_reset", "max", "mean", "min", "state", "sum"}, ) - assert stats == {"sensor.test1": [expected_2]} + assert stats == { + "sensor.test1": [expected_2], + "sensor.wind_direction": [expected_stats_wind_direction2], + } # Now wipe the latest_short_term_statistics_ids table and test again # to make sure we can rebuild the missing data @@ -241,13 +287,15 @@ async def test_compile_hourly_statistics( stats = get_latest_short_term_statistics_with_session( hass, session, - {"sensor.test1"}, + {"sensor.test1", "sensor.wind_direction"}, {"last_reset", "max", "mean", "min", "state", "sum"}, ) - assert stats == {"sensor.test1": [expected_2]} + assert stats == { + "sensor.test1": [expected_2], + "sensor.wind_direction": [expected_stats_wind_direction2], + } metadata = get_metadata(hass, statistic_ids={"sensor.test1"}) - with session_scope(hass=hass, read_only=True) as session: stats = get_latest_short_term_statistics_with_session( hass, @@ -258,23 +306,44 @@ async def test_compile_hourly_statistics( ) assert stats == {"sensor.test1": [expected_2]} - stats = get_last_short_term_statistics( - hass, - 2, - "sensor.test1", - True, - {"last_reset", "max", "mean", "min", "state", "sum"}, + # Test with multiple metadata ids + metadata = get_metadata( + hass, statistic_ids={"sensor.test1", "sensor.wind_direction"} ) - assert stats == {"sensor.test1": expected_stats1[::-1]} + with session_scope(hass=hass, read_only=True) as session: + stats = get_latest_short_term_statistics_with_session( + hass, + session, + 
{"sensor.test1", "sensor.wind_direction"}, + {"last_reset", "max", "mean", "min", "state", "sum"}, + metadata=metadata, + ) + assert stats == { + "sensor.test1": [expected_2], + "sensor.wind_direction": [expected_stats_wind_direction2], + } - stats = get_last_short_term_statistics( - hass, - 3, - "sensor.test1", - True, - {"last_reset", "max", "mean", "min", "state", "sum"}, - ) - assert stats == {"sensor.test1": expected_stats1[::-1]} + for sensor, expected in ( + ("sensor.test1", expected_stats1[::-1]), + ("sensor.wind_direction", expected_stats_wind_direction[::-1]), + ): + stats = get_last_short_term_statistics( + hass, + 2, + sensor, + True, + {"last_reset", "max", "mean", "min", "state", "sum"}, + ) + assert stats == {sensor: expected} + + stats = get_last_short_term_statistics( + hass, + 3, + sensor, + True, + {"last_reset", "max", "mean", "min", "state", "sum"}, + ) + assert stats == {sensor: expected} stats = get_last_short_term_statistics( hass, @@ -291,7 +360,7 @@ async def test_compile_hourly_statistics( stats = get_latest_short_term_statistics_with_session( hass, session, - {"sensor.test1"}, + {"sensor.test1", "sensor.wind_direction"}, {"last_reset", "max", "mean", "min", "state", "sum"}, ) assert stats == {} @@ -306,7 +375,7 @@ async def test_compile_hourly_statistics( stats = get_latest_short_term_statistics_with_session( hass, session, - {"sensor.test1"}, + {"sensor.test1", "sensor.wind_direction"}, {"last_reset", "max", "mean", "min", "state", "sum"}, ) assert stats == {} @@ -460,15 +529,35 @@ async def test_rename_entity( expected_stats1 = [expected_1] expected_stats2 = [expected_1] expected_stats99 = [expected_1] + expected_stats_wind_direction = [ + { + "start": process_timestamp(zero).timestamp(), + "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), + "mean": pytest.approx(358.6387003873801), + "min": None, + "max": None, + "last_reset": None, + "state": None, + "sum": None, + } + ] stats = statistics_during_period(hass, zero, period="5minute") - assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2} + assert stats == { + "sensor.test1": expected_stats1, + "sensor.test2": expected_stats2, + "sensor.wind_direction": expected_stats_wind_direction, + } entity_registry.async_update_entity("sensor.test1", new_entity_id="sensor.test99") await async_wait_recording_done(hass) stats = statistics_during_period(hass, zero, period="5minute") - assert stats == {"sensor.test99": expected_stats99, "sensor.test2": expected_stats2} + assert stats == { + "sensor.test99": expected_stats99, + "sensor.test2": expected_stats2, + "sensor.wind_direction": expected_stats_wind_direction, + } async def test_statistics_during_period_set_back_compat( @@ -544,9 +633,25 @@ async def test_rename_entity_collision( } expected_stats1 = [expected_1] expected_stats2 = [expected_1] + expected_stats_wind_direction = [ + { + "start": process_timestamp(zero).timestamp(), + "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), + "mean": pytest.approx(358.6387003873801), + "min": None, + "max": None, + "last_reset": None, + "state": None, + "sum": None, + } + ] stats = statistics_during_period(hass, zero, period="5minute") - assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2} + assert stats == { + "sensor.test1": expected_stats1, + "sensor.test2": expected_stats2, + "sensor.wind_direction": expected_stats_wind_direction, + } # Insert metadata for sensor.test99 metadata_1 = { @@ -567,7 +672,11 @@ async def 
test_rename_entity_collision( # Statistics failed to migrate due to the collision stats = statistics_during_period(hass, zero, period="5minute") - assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2} + assert stats == { + "sensor.test1": expected_stats1, + "sensor.test2": expected_stats2, + "sensor.wind_direction": expected_stats_wind_direction, + } # Verify the safeguard in the states meta manager was hit assert ( @@ -631,9 +740,25 @@ async def test_rename_entity_collision_states_meta_check_disabled( } expected_stats1 = [expected_1] expected_stats2 = [expected_1] + expected_stats_wind_direction = [ + { + "start": process_timestamp(zero).timestamp(), + "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), + "mean": pytest.approx(358.6387003873801), + "min": None, + "max": None, + "last_reset": None, + "state": None, + "sum": None, + } + ] stats = statistics_during_period(hass, zero, period="5minute") - assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2} + assert stats == { + "sensor.test1": expected_stats1, + "sensor.test2": expected_stats2, + "sensor.wind_direction": expected_stats_wind_direction, + } # Insert metadata for sensor.test99 metadata_1 = { @@ -660,7 +785,11 @@ async def test_rename_entity_collision_states_meta_check_disabled( # Statistics failed to migrate due to the collision stats = statistics_during_period(hass, zero, period="5minute") - assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2} + assert stats == { + "sensor.test1": expected_stats1, + "sensor.test2": expected_stats2, + "sensor.wind_direction": expected_stats_wind_direction, + } # Verify the filter_unique_constraint_integrity_error safeguard was hit assert "Blocked attempt to insert duplicated statistic rows" in caplog.text @@ -786,6 +915,7 @@ async def test_import_statistics( { "display_unit_of_measurement": "kWh", "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "statistic_id": statistic_id, "name": "Total imported energy", @@ -800,6 +930,7 @@ async def test_import_statistics( 1, { "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": "Total imported energy", "source": source, @@ -876,6 +1007,7 @@ async def test_import_statistics( { "display_unit_of_measurement": "kWh", "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "statistic_id": statistic_id, "name": "Total imported energy renamed", @@ -890,6 +1022,7 @@ async def test_import_statistics( 1, { "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": "Total imported energy renamed", "source": source, diff --git a/tests/components/recorder/test_websocket_api.py b/tests/components/recorder/test_websocket_api.py index a4e35bc8753..a4e4fe45db1 100644 --- a/tests/components/recorder/test_websocket_api.py +++ b/tests/components/recorder/test_websocket_api.py @@ -1,11 +1,14 @@ """The tests for sensor recorder platform.""" +from collections.abc import Iterable import datetime from datetime import timedelta +import math from statistics import fmean import sys from unittest.mock import ANY, patch +from _pytest.python_api import ApproxBase from freezegun import freeze_time from freezegun.api import FrozenDateTimeFactory import pytest @@ -13,7 +16,14 @@ import pytest from homeassistant.components import recorder from homeassistant.components.recorder import Recorder from homeassistant.components.recorder.db_schema import Statistics, StatisticsShortTerm +from 
homeassistant.components.recorder.models import ( + StatisticData, + StatisticMeanType, + StatisticMetaData, +) from homeassistant.components.recorder.statistics import ( + DEG_TO_RAD, + RAD_TO_DEG, async_add_external_statistics, get_last_statistics, get_latest_short_term_statistics_with_session, @@ -24,6 +34,7 @@ from homeassistant.components.recorder.statistics import ( from homeassistant.components.recorder.util import session_scope from homeassistant.components.recorder.websocket_api import UNIT_SCHEMA from homeassistant.components.sensor import UNIT_CONVERTERS +from homeassistant.const import DEGREE from homeassistant.core import HomeAssistant from homeassistant.helpers import recorder as recorder_helper from homeassistant.setup import async_setup_component @@ -247,12 +258,12 @@ async def test_statistics_during_period( @pytest.mark.freeze_time(datetime.datetime(2022, 10, 21, 7, 25, tzinfo=datetime.UTC)) +@pytest.mark.usefixtures("recorder_mock") @pytest.mark.parametrize("offset", [0, 1, 2]) async def test_statistic_during_period( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - offset, + offset: int, ) -> None: """Test statistic_during_period.""" now = dt_util.utcnow() @@ -307,7 +318,7 @@ async def test_statistic_during_period( ) imported_metadata = { - "has_mean": False, + "has_mean": True, "has_sum": True, "name": "Total imported energy", "source": "recorder", @@ -655,7 +666,7 @@ async def test_statistic_during_period( hass, session, {"sensor.test"}, - {"last_reset", "max", "mean", "min", "state", "sum"}, + {"last_reset", "state", "sum"}, ) start = imported_stats_5min[-1]["start"].timestamp() end = start + (5 * 60) @@ -672,18 +683,376 @@ async def test_statistic_during_period( } +def _circular_mean(values: Iterable[StatisticData]) -> dict[str, float]: + sin_sum = 0 + cos_sum = 0 + for x in values: + mean = x.get("mean") + assert mean is not None + sin_sum += math.sin(mean * DEG_TO_RAD) + cos_sum += math.cos(mean * DEG_TO_RAD) + + return { + "mean": (RAD_TO_DEG * math.atan2(sin_sum, cos_sum)) % 360, + "mean_weight": math.sqrt(sin_sum**2 + cos_sum**2), + } + + +def _circular_mean_approx(values: Iterable[StatisticData]) -> ApproxBase: + return pytest.approx(_circular_mean(values)["mean"]) + + +@pytest.mark.freeze_time(datetime.datetime(2022, 10, 21, 7, 25, tzinfo=datetime.UTC)) +@pytest.mark.usefixtures("recorder_mock") +@pytest.mark.parametrize("offset", [0, 1, 2]) +async def test_statistic_during_period_circular_mean( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + offset: int, +) -> None: + """Test statistic_during_period.""" + now = dt_util.utcnow() + + await async_recorder_block_till_done(hass) + client = await hass_ws_client() + + zero = now + start = zero.replace(minute=offset * 5, second=0, microsecond=0) + timedelta( + hours=-3 + ) + + imported_stats_5min: list[StatisticData] = [ + { + "start": (start + timedelta(minutes=5 * i)), + "mean": (123.456 * i) % 360, + "mean_weight": 1, + } + for i in range(39) + ] + + imported_stats = [] + slice_end = 12 - offset + imported_stats.append( + { + "start": imported_stats_5min[0]["start"].replace(minute=0), + **_circular_mean(imported_stats_5min[0:slice_end]), + } + ) + for i in range(2): + slice_start = i * 12 + (12 - offset) + slice_end = (i + 1) * 12 + (12 - offset) + assert imported_stats_5min[slice_start]["start"].minute == 0 + imported_stats.append( + { + "start": imported_stats_5min[slice_start]["start"], + **_circular_mean(imported_stats_5min[slice_start:slice_end]), + } + ) + + 
imported_metadata: StatisticMetaData = { + "mean_type": StatisticMeanType.CIRCULAR, + "has_sum": False, + "name": "Wind direction", + "source": "recorder", + "statistic_id": "sensor.test", + "unit_of_measurement": DEGREE, + } + + recorder.get_instance(hass).async_import_statistics( + imported_metadata, + imported_stats, + Statistics, + ) + recorder.get_instance(hass).async_import_statistics( + imported_metadata, + imported_stats_5min, + StatisticsShortTerm, + ) + await async_wait_recording_done(hass) + + metadata = get_metadata(hass, statistic_ids={"sensor.test"}) + metadata_id = metadata["sensor.test"][0] + run_cache = get_short_term_statistics_run_cache(hass) + # Verify the import of the short term statistics + # also updates the run cache + assert run_cache.get_latest_ids({metadata_id}) is not None + + # No data for this period yet + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "fixed_period": { + "start_time": now.isoformat(), + "end_time": now.isoformat(), + }, + "statistic_id": "sensor.test", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "max": None, + "mean": None, + "min": None, + "change": None, + } + + # This should include imported_statistics_5min[:] + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "statistic_id": "sensor.test", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats_5min), + "max": None, + "min": None, + "change": None, + } + + # This should also include imported_statistics_5min[:] + start_time = ( + dt_util.parse_datetime("2022-10-21T04:00:00+00:00") + + timedelta(minutes=5 * offset) + ).isoformat() + end_time = ( + dt_util.parse_datetime("2022-10-21T07:15:00+00:00") + + timedelta(minutes=5 * offset) + ).isoformat() + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "statistic_id": "sensor.test", + "fixed_period": { + "start_time": start_time, + "end_time": end_time, + }, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats_5min), + "max": None, + "min": None, + "change": None, + } + + # This should also include imported_statistics_5min[:] + start_time = ( + dt_util.parse_datetime("2022-10-21T04:00:00+00:00") + + timedelta(minutes=5 * offset) + ).isoformat() + end_time = ( + dt_util.parse_datetime("2022-10-21T08:20:00+00:00") + + timedelta(minutes=5 * offset) + ).isoformat() + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "statistic_id": "sensor.test", + "fixed_period": { + "start_time": start_time, + "end_time": end_time, + }, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats_5min), + "max": None, + "min": None, + "change": None, + } + + # This should include imported_statistics_5min[26:] + start_time = ( + dt_util.parse_datetime("2022-10-21T06:10:00+00:00") + + timedelta(minutes=5 * offset) + ).isoformat() + assert imported_stats_5min[26]["start"].isoformat() == start_time + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "fixed_period": { + "start_time": start_time, + }, + "statistic_id": "sensor.test", + } + ) + response = await client.receive_json() + assert response["success"] + assert 
response["result"] == { + "mean": _circular_mean_approx(imported_stats_5min[26:]), + "max": None, + "min": None, + "change": None, + } + + # This should also include imported_statistics_5min[26:] + start_time = ( + dt_util.parse_datetime("2022-10-21T06:09:00+00:00") + + timedelta(minutes=5 * offset) + ).isoformat() + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "fixed_period": { + "start_time": start_time, + }, + "statistic_id": "sensor.test", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats_5min[26:]), + "max": None, + "min": None, + "change": None, + } + + # This should include imported_statistics_5min[:26] + end_time = ( + dt_util.parse_datetime("2022-10-21T06:10:00+00:00") + + timedelta(minutes=5 * offset) + ).isoformat() + assert imported_stats_5min[26]["start"].isoformat() == end_time + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "fixed_period": { + "end_time": end_time, + }, + "statistic_id": "sensor.test", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats_5min[:26]), + "max": None, + "min": None, + "change": None, + } + + # This should include imported_statistics_5min[26:32] (less than a full hour) + start_time = ( + dt_util.parse_datetime("2022-10-21T06:10:00+00:00") + + timedelta(minutes=5 * offset) + ).isoformat() + assert imported_stats_5min[26]["start"].isoformat() == start_time + end_time = ( + dt_util.parse_datetime("2022-10-21T06:40:00+00:00") + + timedelta(minutes=5 * offset) + ).isoformat() + assert imported_stats_5min[32]["start"].isoformat() == end_time + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "fixed_period": { + "start_time": start_time, + "end_time": end_time, + }, + "statistic_id": "sensor.test", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats_5min[26:32]), + "max": None, + "min": None, + "change": None, + } + + # This should include imported_statistics[2:] + imported_statistics_5min[36:] + start_time = "2022-10-21T06:00:00+00:00" + assert imported_stats_5min[24 - offset]["start"].isoformat() == start_time + assert imported_stats[2]["start"].isoformat() == start_time + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "fixed_period": { + "start_time": start_time, + }, + "statistic_id": "sensor.test", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats_5min[24 - offset :]), + "max": None, + "min": None, + "change": None, + } + + # This should also include imported_statistics[2:] + imported_statistics_5min[36:] + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "rolling_window": { + "duration": {"hours": 1, "minutes": 25}, + }, + "statistic_id": "sensor.test", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats_5min[24 - offset :]), + "max": None, + "min": None, + "change": None, + } + + # This should include imported_statistics[2:3] + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "rolling_window": { + "duration": 
{"hours": 1}, + "offset": {"minutes": -25}, + }, + "statistic_id": "sensor.test", + } + ) + response = await client.receive_json() + assert response["success"] + slice_start = 24 - offset + slice_end = 36 - offset + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats_5min[slice_start:slice_end]), + "max": None, + "min": None, + "change": None, + } + + # Test we can get only selected types + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "statistic_id": "sensor.test", + "types": ["mean"], + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats_5min), + } + + @pytest.mark.freeze_time(datetime.datetime(2022, 10, 21, 7, 25, tzinfo=datetime.UTC)) async def test_statistic_during_period_hole( recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test statistic_during_period when there are holes in the data.""" - stat_id = 1 - - def next_id(): - nonlocal stat_id - stat_id += 1 - return stat_id - now = dt_util.utcnow() await async_recorder_block_till_done(hass) @@ -704,7 +1073,7 @@ async def test_statistic_during_period_hole( ] imported_metadata = { - "has_mean": False, + "has_mean": True, "has_sum": True, "name": "Total imported energy", "source": "recorder", @@ -830,6 +1199,156 @@ async def test_statistic_during_period_hole( } +@pytest.mark.freeze_time(datetime.datetime(2022, 10, 21, 7, 25, tzinfo=datetime.UTC)) +@pytest.mark.usefixtures("recorder_mock") +async def test_statistic_during_period_hole_circular_mean( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test statistic_during_period when there are holes in the data.""" + now = dt_util.utcnow() + + await async_recorder_block_till_done(hass) + client = await hass_ws_client() + + zero = now + start = zero.replace(minute=0, second=0, microsecond=0) + timedelta(hours=-18) + + imported_stats: list[StatisticData] = [ + { + "start": (start + timedelta(hours=3 * i)), + "mean": (123.456 * i) % 360, + "mean_weight": 1, + } + for i in range(6) + ] + + imported_metadata: StatisticMetaData = { + "mean_type": StatisticMeanType.CIRCULAR, + "has_sum": False, + "name": "Wind direction", + "source": "recorder", + "statistic_id": "sensor.test", + "unit_of_measurement": DEGREE, + } + + recorder.get_instance(hass).async_import_statistics( + imported_metadata, + imported_stats, + Statistics, + ) + await async_wait_recording_done(hass) + + # This should include imported_stats[:] + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "statistic_id": "sensor.test", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats[:]), + "max": None, + "min": None, + "change": None, + } + + # This should also include imported_stats[:] + start_time = "2022-10-20T13:00:00+00:00" + end_time = "2022-10-21T05:00:00+00:00" + assert imported_stats[0]["start"].isoformat() == start_time + assert imported_stats[-1]["start"].isoformat() < end_time + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "statistic_id": "sensor.test", + "fixed_period": { + "start_time": start_time, + "end_time": end_time, + }, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats[:]), + "max": None, + "min": None, + 
"change": None, + } + + # This should also include imported_stats[:] + start_time = "2022-10-20T13:00:00+00:00" + end_time = "2022-10-21T08:20:00+00:00" + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "statistic_id": "sensor.test", + "fixed_period": { + "start_time": start_time, + "end_time": end_time, + }, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats[:]), + "max": None, + "min": None, + "change": None, + } + + # This should include imported_stats[1:4] + start_time = "2022-10-20T16:00:00+00:00" + end_time = "2022-10-20T23:00:00+00:00" + assert imported_stats[1]["start"].isoformat() == start_time + assert imported_stats[3]["start"].isoformat() < end_time + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "statistic_id": "sensor.test", + "fixed_period": { + "start_time": start_time, + "end_time": end_time, + }, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats[1:4]), + "max": None, + "min": None, + "change": None, + } + + # This should also include imported_stats[1:4] + start_time = "2022-10-20T15:00:00+00:00" + end_time = "2022-10-21T00:00:00+00:00" + assert imported_stats[1]["start"].isoformat() > start_time + assert imported_stats[3]["start"].isoformat() < end_time + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "statistic_id": "sensor.test", + "fixed_period": { + "start_time": start_time, + "end_time": end_time, + }, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats[1:4]), + "max": None, + "min": None, + "change": None, + } + + @pytest.mark.parametrize( "frozen_time", [ @@ -897,7 +1416,7 @@ async def test_statistic_during_period_partial_overlap( statId = "sensor.test_overlapping" imported_metadata = { - "has_mean": False, + "has_mean": True, "has_sum": True, "name": "Total imported energy overlapping", "source": "recorder", @@ -1766,6 +2285,7 @@ async def test_list_statistic_ids( """Test list_statistic_ids.""" now = get_start_time(dt_util.utcnow()) has_mean = attributes["state_class"] == "measurement" + mean_type = StatisticMeanType.ARITHMETIC if has_mean else StatisticMeanType.NONE has_sum = not has_mean hass.config.units = units @@ -1791,6 +2311,7 @@ async def test_list_statistic_ids( "statistic_id": "sensor.test", "display_unit_of_measurement": display_unit, "has_mean": has_mean, + "mean_type": mean_type, "has_sum": has_sum, "name": None, "source": "recorder", @@ -1813,6 +2334,7 @@ async def test_list_statistic_ids( "statistic_id": "sensor.test", "display_unit_of_measurement": display_unit, "has_mean": has_mean, + "mean_type": mean_type, "has_sum": has_sum, "name": None, "source": "recorder", @@ -1838,6 +2360,7 @@ async def test_list_statistic_ids( "statistic_id": "sensor.test", "display_unit_of_measurement": display_unit, "has_mean": has_mean, + "mean_type": mean_type, "has_sum": has_sum, "name": None, "source": "recorder", @@ -1859,6 +2382,7 @@ async def test_list_statistic_ids( "statistic_id": "sensor.test", "display_unit_of_measurement": display_unit, "has_mean": has_mean, + "mean_type": mean_type, "has_sum": has_sum, "name": None, "source": "recorder", @@ -1939,6 +2463,7 @@ async def test_list_statistic_ids_unit_change( """Test 
list_statistic_ids.""" now = get_start_time(dt_util.utcnow()) has_mean = attributes["state_class"] == "measurement" + mean_type = StatisticMeanType.ARITHMETIC if has_mean else StatisticMeanType.NONE has_sum = not has_mean await async_setup_component(hass, "sensor", {}) @@ -1966,6 +2491,7 @@ async def test_list_statistic_ids_unit_change( "statistic_id": "sensor.test", "display_unit_of_measurement": statistics_unit, "has_mean": has_mean, + "mean_type": mean_type, "has_sum": has_sum, "name": None, "source": "recorder", @@ -1987,6 +2513,7 @@ async def test_list_statistic_ids_unit_change( "statistic_id": "sensor.test", "display_unit_of_measurement": display_unit, "has_mean": has_mean, + "mean_type": mean_type, "has_sum": has_sum, "name": None, "source": "recorder", @@ -2208,6 +2735,7 @@ async def test_update_statistics_metadata( "statistic_id": "sensor.test", "display_unit_of_measurement": "kW", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -2235,6 +2763,7 @@ async def test_update_statistics_metadata( "statistic_id": "sensor.test", "display_unit_of_measurement": new_display_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -2324,6 +2853,7 @@ async def test_change_statistics_unit( "statistic_id": "sensor.test", "display_unit_of_measurement": "kW", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -2375,6 +2905,7 @@ async def test_change_statistics_unit( "statistic_id": "sensor.test", "display_unit_of_measurement": "kW", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -2428,6 +2959,7 @@ async def test_change_statistics_unit( "statistic_id": "sensor.test", "display_unit_of_measurement": "kW", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -2455,6 +2987,7 @@ async def test_change_statistics_unit_errors( "statistic_id": "sensor.test", "display_unit_of_measurement": "kW", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -2774,6 +3307,7 @@ async def test_get_statistics_metadata( """Test get_statistics_metadata.""" now = get_start_time(dt_util.utcnow()) has_mean = attributes["state_class"] == "measurement" + mean_type = StatisticMeanType.ARITHMETIC if has_mean else StatisticMeanType.NONE has_sum = not has_mean hass.config.units = units @@ -2843,6 +3377,7 @@ async def test_get_statistics_metadata( "statistic_id": "test:total_gas", "display_unit_of_measurement": unit, "has_mean": has_mean, + "mean_type": mean_type, "has_sum": has_sum, "name": "Total imported energy", "source": "test", @@ -2874,6 +3409,7 @@ async def test_get_statistics_metadata( "statistic_id": "sensor.test", "display_unit_of_measurement": attributes["unit_of_measurement"], "has_mean": has_mean, + "mean_type": mean_type, "has_sum": has_sum, "name": None, "source": "recorder", @@ -2901,6 +3437,7 @@ async def test_get_statistics_metadata( "statistic_id": "sensor.test", "display_unit_of_measurement": attributes["unit_of_measurement"], "has_mean": has_mean, + "mean_type": mean_type, "has_sum": has_sum, "name": None, "source": "recorder", @@ -2995,6 +3532,7 @@ async def test_import_statistics( { "display_unit_of_measurement": "kWh", "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, 
"statistic_id": statistic_id, "name": "Total imported energy", @@ -3009,6 +3547,7 @@ async def test_import_statistics( 1, { "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": "Total imported energy", "source": source, @@ -3213,6 +3752,7 @@ async def test_adjust_sum_statistics_energy( { "display_unit_of_measurement": "kWh", "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "statistic_id": statistic_id, "name": "Total imported energy", @@ -3227,6 +3767,7 @@ async def test_adjust_sum_statistics_energy( 1, { "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": "Total imported energy", "source": source, @@ -3406,6 +3947,7 @@ async def test_adjust_sum_statistics_gas( { "display_unit_of_measurement": "m³", "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "statistic_id": statistic_id, "name": "Total imported energy", @@ -3420,6 +3962,7 @@ async def test_adjust_sum_statistics_gas( 1, { "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": "Total imported energy", "source": source, @@ -3617,6 +4160,7 @@ async def test_adjust_sum_statistics_errors( { "display_unit_of_measurement": state_unit, "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "statistic_id": statistic_id, "name": "Total imported energy", @@ -3631,6 +4175,7 @@ async def test_adjust_sum_statistics_errors( 1, { "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": "Total imported energy", "source": source, diff --git a/tests/components/remote_calendar/__init__.py b/tests/components/remote_calendar/__init__.py new file mode 100644 index 00000000000..2ffb157f072 --- /dev/null +++ b/tests/components/remote_calendar/__init__.py @@ -0,0 +1,11 @@ +"""Tests for the Remote Calendar integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/remote_calendar/conftest.py b/tests/components/remote_calendar/conftest.py new file mode 100644 index 00000000000..bf5184bbf54 --- /dev/null +++ b/tests/components/remote_calendar/conftest.py @@ -0,0 +1,89 @@ +"""Fixtures for Remote Calendar.""" + +from collections.abc import Awaitable, Callable +from http import HTTPStatus +import textwrap +from typing import Any +import urllib + +import pytest + +from homeassistant.components.remote_calendar.const import CONF_CALENDAR_NAME, DOMAIN +from homeassistant.const import CONF_URL +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry +from tests.typing import ClientSessionGenerator + +CALENDAR_NAME = "Home Assistant Events" +TEST_ENTITY = "calendar.home_assistant_events" +CALENDER_URL = "https://some.calendar.com/calendar.ics" +FRIENDLY_NAME = "Home Assistant Events" + + +@pytest.fixture(name="time_zone") +def mock_time_zone() -> str: + """Fixture for time zone to use in tests.""" + # Set our timezone to CST/Regina so we can check calculations + # This keeps UTC-6 all year round + return "America/Regina" + + +@pytest.fixture(autouse=True) +async def set_time_zone(hass: HomeAssistant, time_zone: str): + """Set the time zone for the tests.""" + # Set our timezone to CST/Regina so we can check calculations + # This keeps UTC-6 all 
year round + await hass.config.async_set_time_zone(time_zone) + + +@pytest.fixture(name="config_entry") +def mock_config_entry() -> MockConfigEntry: + """Fixture for mock configuration entry.""" + return MockConfigEntry( + domain=DOMAIN, data={CONF_CALENDAR_NAME: CALENDAR_NAME, CONF_URL: CALENDER_URL} + ) + + +type GetEventsFn = Callable[[str, str], Awaitable[list[dict[str, Any]]]] + + +@pytest.fixture(name="get_events") +def get_events_fixture(hass_client: ClientSessionGenerator) -> GetEventsFn: + """Fetch calendar events from the HTTP API.""" + + async def _fetch(start: str, end: str) -> list[dict[str, Any]]: + client = await hass_client() + response = await client.get( + f"/api/calendars/{TEST_ENTITY}?start={urllib.parse.quote(start)}&end={urllib.parse.quote(end)}" + ) + assert response.status == HTTPStatus.OK + return await response.json() + + return _fetch + + +def event_fields(data: dict[str, str]) -> dict[str, str]: + """Filter event API response to minimum fields.""" + return { + k: data[k] + for k in ("summary", "start", "end", "recurrence_id", "location") + if data.get(k) + } + + +@pytest.fixture(name="ics_content") +def mock_ics_content(request: pytest.FixtureRequest) -> str: + """Fixture to allow tests to set initial ics content for the calendar store.""" + default_content = textwrap.dedent( + """\ + BEGIN:VCALENDAR + BEGIN:VEVENT + SUMMARY:Bastille Day Party + DTSTART:19970714T170000Z + DTEND:19970715T040000Z + END:VEVENT + END:VCALENDAR + """ + ) + return request.param if hasattr(request, "param") else default_content diff --git a/tests/components/remote_calendar/snapshots/test_diagnostics.ambr b/tests/components/remote_calendar/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..de955f8a2aa --- /dev/null +++ b/tests/components/remote_calendar/snapshots/test_diagnostics.ambr @@ -0,0 +1,17 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'ics': ''' + BEGIN:VCALENDAR + BEGIN:VEVENT + SUMMARY:*** + DTSTART:19970714T170000Z + DTEND:19970715T040000Z + END:VEVENT + END:VCALENDAR + ''', + 'now': '2023-06-04T18:00:00-06:00', + 'system_timezone': 'tzlocal()', + 'timezone': 'America/Regina', + }) +# --- diff --git a/tests/components/remote_calendar/test_calendar.py b/tests/components/remote_calendar/test_calendar.py new file mode 100644 index 00000000000..6ae817321c3 --- /dev/null +++ b/tests/components/remote_calendar/test_calendar.py @@ -0,0 +1,394 @@ +"""Tests for calendar platform of Remote Calendar.""" + +from datetime import datetime +import textwrap + +from httpx import Response +import pytest +import respx + +from homeassistant.const import STATE_OFF, STATE_ON +from homeassistant.core import HomeAssistant + +from . 
import setup_integration +from .conftest import ( + CALENDER_URL, + FRIENDLY_NAME, + TEST_ENTITY, + GetEventsFn, + event_fields, +) + +from tests.common import MockConfigEntry + + +@respx.mock +async def test_empty_calendar( + hass: HomeAssistant, + config_entry: MockConfigEntry, + get_events: GetEventsFn, +) -> None: + """Test querying the API and fetching events.""" + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=200, + text=textwrap.dedent( + """BEGIN:VCALENDAR + VERSION:2.0 + PRODID:-//hacksw/handcal//NONSGML v1.0//EN + END:VCALENDAR + """ + ), + ) + ) + await setup_integration(hass, config_entry) + events = await get_events("1997-07-14T00:00:00", "1997-07-16T00:00:00") + assert len(events) == 0 + + state = hass.states.get(TEST_ENTITY) + assert state + assert state.name == FRIENDLY_NAME + assert state.state == STATE_OFF + assert dict(state.attributes) == { + "friendly_name": FRIENDLY_NAME, + } + + +@pytest.mark.parametrize( + "ics_content", + [ + textwrap.dedent( + """\ + BEGIN:VCALENDAR + VERSION:2.0 + BEGIN:VEVENT + SUMMARY:Bastille Day Party + DTSTART;TZID=Europe/Berlin:19970714T190000 + DTEND;TZID=Europe/Berlin:19970715T060000 + END:VEVENT + END:VCALENDAR + """ + ), + textwrap.dedent( + """\ + BEGIN:VCALENDAR + BEGIN:VEVENT + SUMMARY:Bastille Day Party + DTSTART:19970714T170000Z + DTEND:19970715T040000Z + END:VEVENT + END:VCALENDAR + """ + ), + textwrap.dedent( + """\ + BEGIN:VCALENDAR + VERSION:2.0 + BEGIN:VEVENT + SUMMARY:Bastille Day Party + DTSTART;TZID=America/Regina:19970714T110000 + DTEND;TZID=America/Regina:19970714T220000 + END:VEVENT + END:VCALENDAR + """ + ), + textwrap.dedent( + """\ + BEGIN:VCALENDAR + VERSION:2.0 + BEGIN:VEVENT + SUMMARY:Bastille Day Party + DTSTART;TZID=America/Los_Angeles:19970714T100000 + DTEND;TZID=America/Los_Angeles:19970714T210000 + END:VEVENT + END:VCALENDAR + """ + ), + ], +) +@respx.mock +async def test_api_date_time_event( + get_events: GetEventsFn, + hass: HomeAssistant, + config_entry: MockConfigEntry, + ics_content: str, +) -> None: + """Test an event with a start/end date time.""" + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=200, + text=ics_content, + ) + ) + await setup_integration(hass, config_entry) + events = await get_events("1997-07-14T00:00:00Z", "1997-07-16T00:00:00Z") + assert list(map(event_fields, events)) == [ + { + "summary": "Bastille Day Party", + "start": {"dateTime": "1997-07-14T11:00:00-06:00"}, + "end": {"dateTime": "1997-07-14T22:00:00-06:00"}, + } + ] + + # Query events in UTC + + # Time range before event + events = await get_events("1997-07-13T00:00:00Z", "1997-07-14T16:00:00Z") + assert len(events) == 0 + # Time range after event + events = await get_events("1997-07-15T05:00:00Z", "1997-07-15T06:00:00Z") + assert len(events) == 0 + + # Overlap with event start + events = await get_events("1997-07-13T00:00:00Z", "1997-07-14T18:00:00Z") + assert len(events) == 1 + # Overlap with event end + events = await get_events("1997-07-15T03:00:00Z", "1997-07-15T06:00:00Z") + assert len(events) == 1 + + # Query events overlapping with start and end but in another timezone + events = await get_events("1997-07-12T23:00:00-01:00", "1997-07-14T17:00:00-01:00") + assert len(events) == 1 + events = await get_events("1997-07-15T02:00:00-01:00", "1997-07-15T05:00:00-01:00") + assert len(events) == 1 + + +@respx.mock +async def test_api_date_event( + get_events: GetEventsFn, + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test an event with a start/end date all 
day event.""" + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=200, + text=textwrap.dedent( + """\ + BEGIN:VCALENDAR + VERSION:2.0 + BEGIN:VEVENT + SUMMARY:Festival International de Jazz de Montreal + DTSTART:20070628 + DTEND:20070709 + END:VEVENT + END:VCALENDAR + """ + ), + ) + ) + await setup_integration(hass, config_entry) + events = await get_events("2007-06-20T00:00:00", "2007-07-20T00:00:00") + assert list(map(event_fields, events)) == [ + { + "summary": "Festival International de Jazz de Montreal", + "start": {"date": "2007-06-28"}, + "end": {"date": "2007-07-09"}, + } + ] + + # Time range before event (timezone is -6) + events = await get_events("2007-06-26T00:00:00Z", "2007-06-28T01:00:00Z") + assert len(events) == 0 + # Time range after event + events = await get_events("2007-07-10T00:00:00Z", "2007-07-11T00:00:00Z") + assert len(events) == 0 + + # Overlap with event start (timezone is -6) + events = await get_events("2007-06-26T00:00:00Z", "2007-06-28T08:00:00Z") + assert len(events) == 1 + # Overlap with event end + events = await get_events("2007-07-09T00:00:00Z", "2007-07-11T00:00:00Z") + assert len(events) == 1 + + +@pytest.mark.freeze_time(datetime(2007, 6, 28, 12)) +@respx.mock +async def test_active_event( + get_events: GetEventsFn, + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test an event with a start/end date time.""" + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=200, + text=textwrap.dedent( + """\ + BEGIN:VCALENDAR + VERSION:2.0 + BEGIN:VEVENT + SUMMARY:Festival International de Jazz de Montreal + LOCATION:Montreal + DTSTART:20070628 + DTEND:20070709 + END:VEVENT + END:VCALENDAR + """ + ), + ) + ) + await setup_integration(hass, config_entry) + state = hass.states.get(TEST_ENTITY) + assert state + assert state.name == FRIENDLY_NAME + assert state.state == STATE_ON + assert dict(state.attributes) == { + "friendly_name": FRIENDLY_NAME, + "message": "Festival International de Jazz de Montreal", + "all_day": True, + "description": "", + "location": "Montreal", + "start_time": "2007-06-28 00:00:00", + "end_time": "2007-07-09 00:00:00", + } + + +@pytest.mark.freeze_time(datetime(2007, 6, 27, 12)) +@respx.mock +async def test_upcoming_event( + get_events: GetEventsFn, + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test an event with a start/end date time.""" + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=200, + text=textwrap.dedent( + """\ + BEGIN:VCALENDAR + VERSION:2.0 + BEGIN:VEVENT + SUMMARY:Festival International de Jazz de Montreal + LOCATION:Montreal + DTSTART:20070628 + DTEND:20070709 + END:VEVENT + END:VCALENDAR + """ + ), + ) + ) + await setup_integration(hass, config_entry) + state = hass.states.get(TEST_ENTITY) + assert state + assert state.name == FRIENDLY_NAME + assert state.state == STATE_OFF + assert dict(state.attributes) == { + "friendly_name": FRIENDLY_NAME, + "message": "Festival International de Jazz de Montreal", + "all_day": True, + "description": "", + "location": "Montreal", + "start_time": "2007-06-28 00:00:00", + "end_time": "2007-07-09 00:00:00", + } + + +@respx.mock +async def test_recurring_event( + get_events: GetEventsFn, + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test an event with a recurrence rule.""" + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=200, + text=textwrap.dedent( + """\ + BEGIN:VCALENDAR + BEGIN:VEVENT + DTSTART:20220829T090000 + DTEND:20220829T100000 
+ SUMMARY:Monday meeting + RRULE:FREQ=WEEKLY;BYDAY=MO + END:VEVENT + END:VCALENDAR + """ + ), + ) + ) + await setup_integration(hass, config_entry) + + events = await get_events("2022-08-20T00:00:00", "2022-09-20T00:00:00") + assert list(map(event_fields, events)) == [ + { + "summary": "Monday meeting", + "start": {"dateTime": "2022-08-29T09:00:00-06:00"}, + "end": {"dateTime": "2022-08-29T10:00:00-06:00"}, + "recurrence_id": "20220829T090000", + }, + { + "summary": "Monday meeting", + "start": {"dateTime": "2022-09-05T09:00:00-06:00"}, + "end": {"dateTime": "2022-09-05T10:00:00-06:00"}, + "recurrence_id": "20220905T090000", + }, + { + "summary": "Monday meeting", + "start": {"dateTime": "2022-09-12T09:00:00-06:00"}, + "end": {"dateTime": "2022-09-12T10:00:00-06:00"}, + "recurrence_id": "20220912T090000", + }, + { + "summary": "Monday meeting", + "start": {"dateTime": "2022-09-19T09:00:00-06:00"}, + "end": {"dateTime": "2022-09-19T10:00:00-06:00"}, + "recurrence_id": "20220919T090000", + }, + ] + + +@respx.mock +@pytest.mark.parametrize( + ("time_zone", "event_order"), + [ + ("America/Los_Angeles", ["One", "Two", "All Day Event"]), + ("America/Regina", ["One", "Two", "All Day Event"]), + ("UTC", ["One", "All Day Event", "Two"]), + ("Asia/Tokyo", ["All Day Event", "One", "Two"]), + ], +) +async def test_all_day_iter_order( + get_events: GetEventsFn, + hass: HomeAssistant, + config_entry: MockConfigEntry, + event_order: list[str], +) -> None: + """Test the sort order of an all day events depending on the time zone.""" + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=200, + text=textwrap.dedent( + """\ + BEGIN:VCALENDAR + + BEGIN:VEVENT + DTSTART:20221008 + DTEND:20221009 + SUMMARY:All Day Event + END:VEVENT + + BEGIN:VEVENT + DTSTART:20221007T230000Z + DTEND:20221008T233000Z + SUMMARY:One + END:VEVENT + + BEGIN:VEVENT + DTSTART:20221008T010000Z + DTEND:20221008T020000Z + SUMMARY:Two + END:VEVENT + + END:VCALENDAR + """ + ), + ) + ) + await setup_integration(hass, config_entry) + + events = await get_events("2022-10-06T00:00:00Z", "2022-10-09T00:00:00Z") + assert [event["summary"] for event in events] == event_order diff --git a/tests/components/remote_calendar/test_config_flow.py b/tests/components/remote_calendar/test_config_flow.py new file mode 100644 index 00000000000..626bc2c6e03 --- /dev/null +++ b/tests/components/remote_calendar/test_config_flow.py @@ -0,0 +1,276 @@ +"""Test the Remote Calendar config flow.""" + +from httpx import ConnectError, Response, UnsupportedProtocol +import pytest +import respx + +from homeassistant.components.remote_calendar.const import CONF_CALENDAR_NAME, DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_URL +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from . 
import setup_integration +from .conftest import CALENDAR_NAME, CALENDER_URL + +from tests.common import MockConfigEntry + + +@respx.mock +async def test_form_import_ics(hass: HomeAssistant, ics_content: str) -> None: + """Test we get the import form.""" + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=200, + text=ics_content, + ) + ) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_CALENDAR_NAME: CALENDAR_NAME, + CONF_URL: CALENDER_URL, + }, + ) + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == CALENDAR_NAME + assert result2["data"] == { + CONF_CALENDAR_NAME: CALENDAR_NAME, + CONF_URL: CALENDER_URL, + } + + +@pytest.mark.parametrize( + ("side_effect"), + [ + ConnectError("Connection failed"), + UnsupportedProtocol("Unsupported protocol"), + ], +) +@respx.mock +async def test_form_inavild_url( + hass: HomeAssistant, + side_effect: Exception, + ics_content: str, +) -> None: + """Test we get the import form.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + respx.get("invalid-url.com").mock(side_effect=side_effect) + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_CALENDAR_NAME: CALENDAR_NAME, + CONF_URL: "invalid-url.com", + }, + ) + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "cannot_connect"} + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=200, + text=ics_content, + ) + ) + result3 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_CALENDAR_NAME: CALENDAR_NAME, + CONF_URL: CALENDER_URL, + }, + ) + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == CALENDAR_NAME + assert result3["data"] == { + CONF_CALENDAR_NAME: CALENDAR_NAME, + CONF_URL: CALENDER_URL, + } + + +@pytest.mark.parametrize( + ("url", "log_message"), + [ + ( + "unsupported://protocol.com", # Test for httpx.UnsupportedProtocol + "Request URL has an unsupported protocol 'unsupported://'", + ), + ( + "invalid-url", # Test for httpx.ProtocolError + "Request URL is missing an 'http://' or 'https://' protocol", + ), + ( + "https://example.com:abc/", # Test for httpx.InvalidURL + "Invalid port: 'abc'", + ), + ], +) +async def test_unsupported_inputs( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, url: str, log_message: str +) -> None: + """Test that an unsupported inputs results in a form error.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_CALENDAR_NAME: CALENDAR_NAME, + CONF_URL: url, + }, + ) + + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "cannot_connect"} + assert log_message in caplog.text + ## It's not possible to test a successful config flow because, we need to mock httpx.get here + ## and then the exception isn't raised anymore. 
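
The error-path config-flow tests that follow (HTTP 403 and invalid ICS) reuse the recover-and-retry shape already used above for connection errors: mock the calendar URL to fail, assert the form re-renders with a base error, then re-register the route with a valid ICS payload and finish the flow. Below is a minimal sketch of that shape, assuming only names defined elsewhere in this diff (`CALENDER_URL`, `CALENDAR_NAME`, the `ics_content` fixture); the test function name is illustrative and not part of the PR.

```python
"""Sketch of the fail-then-recover config flow pattern used by these tests."""

from httpx import Response
import respx

from homeassistant.components.remote_calendar.const import CONF_CALENDAR_NAME, DOMAIN
from homeassistant.config_entries import SOURCE_USER
from homeassistant.const import CONF_URL
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType

from .conftest import CALENDAR_NAME, CALENDER_URL


@respx.mock
async def test_error_then_recover(hass: HomeAssistant, ics_content: str) -> None:
    """Fail the first attempt, then retry the same flow with a valid calendar."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )
    assert result["type"] is FlowResultType.FORM

    # First attempt: the calendar URL returns an error, so the form re-renders.
    respx.get(CALENDER_URL).mock(return_value=Response(status_code=403))
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        {CONF_CALENDAR_NAME: CALENDAR_NAME, CONF_URL: CALENDER_URL},
    )
    assert result["type"] is FlowResultType.FORM
    assert result["errors"] == {"base": "cannot_connect"}

    # Second attempt: re-register the route with valid ICS and finish the flow.
    respx.get(CALENDER_URL).mock(
        return_value=Response(status_code=200, text=ics_content)
    )
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        {CONF_CALENDAR_NAME: CALENDAR_NAME, CONF_URL: CALENDER_URL},
    )
    assert result["type"] is FlowResultType.CREATE_ENTRY
    assert result["data"] == {
        CONF_CALENDAR_NAME: CALENDAR_NAME,
        CONF_URL: CALENDER_URL,
    }
```
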
+ + +@respx.mock +async def test_form_http_status_error(hass: HomeAssistant, ics_content: str) -> None: + """Test we http status.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=403, + ) + ) + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_CALENDAR_NAME: CALENDAR_NAME, + CONF_URL: CALENDER_URL, + }, + ) + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "cannot_connect"} + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=200, + text=ics_content, + ) + ) + result3 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_CALENDAR_NAME: CALENDAR_NAME, + CONF_URL: CALENDER_URL, + }, + ) + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == CALENDAR_NAME + assert result3["data"] == { + CONF_CALENDAR_NAME: CALENDAR_NAME, + CONF_URL: CALENDER_URL, + } + + +@respx.mock +async def test_no_valid_calendar(hass: HomeAssistant, ics_content: str) -> None: + """Test invalid ics content.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=200, + text="blabla", + ) + ) + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_CALENDAR_NAME: CALENDAR_NAME, + CONF_URL: CALENDER_URL, + }, + ) + + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "invalid_ics_file"} + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=200, + text=ics_content, + ) + ) + result3 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_CALENDAR_NAME: CALENDAR_NAME, + CONF_URL: CALENDER_URL, + }, + ) + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == CALENDAR_NAME + assert result3["data"] == { + CONF_CALENDAR_NAME: CALENDAR_NAME, + CONF_URL: CALENDER_URL, + } + + +async def test_duplicate_name( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test two calendars cannot be added with the same name.""" + + await setup_integration(hass, config_entry) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert not result.get("errors") + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_CALENDAR_NAME: CALENDAR_NAME, + CONF_URL: "http://other-calendar.com", + }, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "already_configured" + + +async def test_duplicate_url( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test two calendars cannot be added with the same url.""" + + await setup_integration(hass, config_entry) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert not result.get("errors") + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_CALENDAR_NAME: "new name", + CONF_URL: CALENDER_URL, + }, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + 
assert result2["reason"] == "already_configured" diff --git a/tests/components/remote_calendar/test_diagnostics.py b/tests/components/remote_calendar/test_diagnostics.py new file mode 100644 index 00000000000..428369b1180 --- /dev/null +++ b/tests/components/remote_calendar/test_diagnostics.py @@ -0,0 +1,39 @@ +"""Test the remote calendar diagnostics.""" + +import datetime + +from httpx import Response +import pytest +import respx +from syrupy.assertion import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from . import setup_integration +from .conftest import CALENDER_URL + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +@respx.mock +@pytest.mark.freeze_time(datetime.datetime(2023, 6, 5)) +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, + config_entry: MockConfigEntry, + ics_content: str, +) -> None: + """Test config entry diagnostics.""" + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=200, + text=ics_content, + ) + ) + await setup_integration(hass, config_entry) + await hass.async_block_till_done() + result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) + assert result == snapshot diff --git a/tests/components/remote_calendar/test_init.py b/tests/components/remote_calendar/test_init.py new file mode 100644 index 00000000000..f4ca500b2e1 --- /dev/null +++ b/tests/components/remote_calendar/test_init.py @@ -0,0 +1,86 @@ +"""Tests for init platform of Remote Calendar.""" + +from httpx import ConnectError, Response, UnsupportedProtocol +import pytest +import respx + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import STATE_OFF +from homeassistant.core import HomeAssistant + +from . 
import setup_integration +from .conftest import CALENDER_URL, TEST_ENTITY + +from tests.common import MockConfigEntry + + +@respx.mock +async def test_load_unload( + hass: HomeAssistant, config_entry: MockConfigEntry, ics_content: str +) -> None: + """Test loading and unloading a config entry.""" + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=200, + text=ics_content, + ) + ) + await setup_integration(hass, config_entry) + assert config_entry.state is ConfigEntryState.LOADED + + state = hass.states.get(TEST_ENTITY) + assert state + assert state.state == STATE_OFF + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.NOT_LOADED + + +@respx.mock +async def test_raise_for_status( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test update failed using respx to simulate HTTP exceptions.""" + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=403, + ) + ) + await setup_integration(hass, config_entry) + assert config_entry.state is ConfigEntryState.SETUP_RETRY + + +@pytest.mark.parametrize( + "side_effect", + [ + ConnectError("Connection failed"), + UnsupportedProtocol("Unsupported protocol"), + ValueError("Invalid response"), + ], +) +@respx.mock +async def test_update_failed( + hass: HomeAssistant, + config_entry: MockConfigEntry, + side_effect: Exception, +) -> None: + """Test update failed using respx to simulate different exceptions.""" + respx.get(CALENDER_URL).mock(side_effect=side_effect) + await setup_integration(hass, config_entry) + assert config_entry.state is ConfigEntryState.SETUP_RETRY + + +@respx.mock +async def test_calendar_parse_error( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test CalendarParseError using respx.""" + respx.get(CALENDER_URL).mock( + return_value=Response(status_code=200, text="not a calendar") + ) + await setup_integration(hass, config_entry) + assert config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/reolink/conftest.py b/tests/components/reolink/conftest.py index 5af55b48dda..21acced3d1d 100644 --- a/tests/components/reolink/conftest.py +++ b/tests/components/reolink/conftest.py @@ -10,6 +10,7 @@ from reolink_aio.exceptions import ReolinkError from homeassistant.components.reolink.config_flow import DEFAULT_PROTOCOL from homeassistant.components.reolink.const import ( + CONF_BC_PORT, CONF_SUPPORTS_PRIVACY_MODE, CONF_USE_HTTPS, DOMAIN, @@ -34,6 +35,7 @@ TEST_PASSWORD = "password" TEST_PASSWORD2 = "new_password" TEST_MAC = "aa:bb:cc:dd:ee:ff" TEST_MAC2 = "ff:ee:dd:cc:bb:aa" +TEST_MAC_CAM = "11:22:33:44:55:66" DHCP_FORMATTED_MAC = "aabbccddeeff" TEST_UID = "ABC1234567D89EFG" TEST_UID_CAM = "DEF7654321D89GHT" @@ -48,6 +50,7 @@ TEST_ITEM_NUMBER = "P000" TEST_CAM_MODEL = "RLC-123" TEST_DUO_MODEL = "Reolink Duo PoE" TEST_PRIVACY = True +TEST_BC_PORT = 5678 @pytest.fixture @@ -132,17 +135,28 @@ def reolink_connect_class() -> Generator[MagicMock]: host_mock.doorbell_led_list.return_value = ["stayoff", "auto"] host_mock.auto_track_method.return_value = 3 host_mock.daynight_state.return_value = "Black&White" + host_mock.hub_alarm_tone_id.return_value = 1 + host_mock.hub_visitor_tone_id.return_value = 1 # Baichuan host_mock.baichuan = create_autospec(Baichuan) # Disable tcp push by default for tests + host_mock.baichuan.port = TEST_BC_PORT host_mock.baichuan.events_active = False + host_mock.baichuan.mac_address.return_value = TEST_MAC_CAM 
host_mock.baichuan.privacy_mode.return_value = False + host_mock.baichuan.day_night_state.return_value = "day" host_mock.baichuan.subscribe_events.side_effect = ReolinkError("Test error") + host_mock.baichuan.active_scene = "off" + host_mock.baichuan.scene_names = ["off", "home"] host_mock.baichuan.abilities = { 0: {"chnID": 0, "aitype": 34615}, "Host": {"pushAlarm": 7}, } + host_mock.baichuan.smart_location_list.return_value = [0] + host_mock.baichuan.smart_ai_type_list.return_value = ["people"] + host_mock.baichuan.smart_ai_index.return_value = 1 + host_mock.baichuan.smart_ai_name.return_value = "zone1" yield host_mock_class @@ -175,6 +189,7 @@ def config_entry(hass: HomeAssistant) -> MockConfigEntry: CONF_PORT: TEST_PORT, CONF_USE_HTTPS: TEST_USE_HTTPS, CONF_SUPPORTS_PRIVACY_MODE: TEST_PRIVACY, + CONF_BC_PORT: TEST_BC_PORT, }, options={ CONF_PROTOCOL: DEFAULT_PROTOCOL, diff --git a/tests/components/reolink/snapshots/test_diagnostics.ambr b/tests/components/reolink/snapshots/test_diagnostics.ambr index f8d5318e9bd..5eb80d16356 100644 --- a/tests/components/reolink/snapshots/test_diagnostics.ambr +++ b/tests/components/reolink/snapshots/test_diagnostics.ambr @@ -62,6 +62,10 @@ 0, ]), 'cmd list': dict({ + '296': dict({ + '0': 1, + 'null': 1, + }), 'DingDongOpt': dict({ '0': 2, 'null': 2, @@ -166,6 +170,9 @@ '0': 1, 'null': 2, }), + 'GetScene': dict({ + 'null': 1, + }), 'GetStateLight': dict({ 'null': 1, }), diff --git a/tests/components/reolink/test_binary_sensor.py b/tests/components/reolink/test_binary_sensor.py index 71318c27b25..99c9efba002 100644 --- a/tests/components/reolink/test_binary_sensor.py +++ b/tests/components/reolink/test_binary_sensor.py @@ -51,6 +51,32 @@ async def test_motion_sensor( assert hass.states.get(entity_id).state == STATE_ON +async def test_smart_ai_sensor( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + freezer: FrozenDateTimeFactory, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test smart ai binary sensor entity.""" + reolink_connect.model = TEST_HOST_MODEL + reolink_connect.baichuan.smart_ai_state.return_value = True + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.BINARY_SENSOR]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.BINARY_SENSOR}.{TEST_NVR_NAME}_crossline_zone1_person" + assert hass.states.get(entity_id).state == STATE_ON + + reolink_connect.baichuan.smart_ai_state.return_value = False + freezer.tick(DEVICE_UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get(entity_id).state == STATE_OFF + + async def test_tcp_callback( hass: HomeAssistant, config_entry: MockConfigEntry, diff --git a/tests/components/reolink/test_config_flow.py b/tests/components/reolink/test_config_flow.py index 4fe671f8cca..e706af0d067 100644 --- a/tests/components/reolink/test_config_flow.py +++ b/tests/components/reolink/test_config_flow.py @@ -19,6 +19,7 @@ from homeassistant import config_entries from homeassistant.components.reolink import DEVICE_UPDATE_INTERVAL from homeassistant.components.reolink.config_flow import DEFAULT_PROTOCOL from homeassistant.components.reolink.const import ( + CONF_BC_PORT, CONF_SUPPORTS_PRIVACY_MODE, CONF_USE_HTTPS, DOMAIN, @@ -40,6 +41,7 @@ from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo from .conftest import ( DHCP_FORMATTED_MAC, + 
TEST_BC_PORT, TEST_HOST, TEST_HOST2, TEST_MAC, @@ -88,6 +90,7 @@ async def test_config_flow_manual_success( CONF_PORT: TEST_PORT, CONF_USE_HTTPS: TEST_USE_HTTPS, CONF_SUPPORTS_PRIVACY_MODE: TEST_PRIVACY, + CONF_BC_PORT: TEST_BC_PORT, } assert result["options"] == { CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -140,6 +143,7 @@ async def test_config_flow_privacy_success( CONF_PORT: TEST_PORT, CONF_USE_HTTPS: TEST_USE_HTTPS, CONF_SUPPORTS_PRIVACY_MODE: TEST_PRIVACY, + CONF_BC_PORT: TEST_BC_PORT, } assert result["options"] == { CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -290,6 +294,7 @@ async def test_config_flow_errors( CONF_HOST: TEST_HOST, CONF_PORT: TEST_PORT, CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_BC_PORT: TEST_BC_PORT, }, ) @@ -302,6 +307,7 @@ async def test_config_flow_errors( CONF_PORT: TEST_PORT, CONF_USE_HTTPS: TEST_USE_HTTPS, CONF_SUPPORTS_PRIVACY_MODE: TEST_PRIVACY, + CONF_BC_PORT: TEST_BC_PORT, } assert result["options"] == { CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -322,6 +328,7 @@ async def test_options_flow(hass: HomeAssistant, mock_setup_entry: MagicMock) -> CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_BC_PORT: TEST_BC_PORT, }, options={ CONF_PROTOCOL: "rtsp", @@ -360,6 +367,7 @@ async def test_reauth(hass: HomeAssistant, mock_setup_entry: MagicMock) -> None: CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_BC_PORT: TEST_BC_PORT, }, options={ CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -405,6 +413,7 @@ async def test_reauth_abort_unique_id_mismatch( CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_BC_PORT: TEST_BC_PORT, }, options={ CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -474,6 +483,7 @@ async def test_dhcp_flow(hass: HomeAssistant, mock_setup_entry: MagicMock) -> No CONF_PORT: TEST_PORT, CONF_USE_HTTPS: TEST_USE_HTTPS, CONF_SUPPORTS_PRIVACY_MODE: TEST_PRIVACY, + CONF_BC_PORT: TEST_BC_PORT, } assert result["options"] == { CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -496,6 +506,7 @@ async def test_dhcp_ip_update_aborted_if_wrong_mac( CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_BC_PORT: TEST_BC_PORT, }, options={ CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -536,6 +547,7 @@ async def test_dhcp_ip_update_aborted_if_wrong_mac( protocol=DEFAULT_PROTOCOL, timeout=DEFAULT_TIMEOUT, aiohttp_get_session_callback=ANY, + bc_port=TEST_BC_PORT, ) assert expected_call in reolink_connect_class.call_args_list @@ -593,6 +605,7 @@ async def test_dhcp_ip_update( CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_BC_PORT: TEST_BC_PORT, }, options={ CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -635,6 +648,7 @@ async def test_dhcp_ip_update( protocol=DEFAULT_PROTOCOL, timeout=DEFAULT_TIMEOUT, aiohttp_get_session_callback=ANY, + bc_port=TEST_BC_PORT, ) assert expected_call in reolink_connect_class.call_args_list @@ -671,6 +685,7 @@ async def test_dhcp_ip_update_ingnored_if_still_connected( CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_BC_PORT: TEST_BC_PORT, }, options={ CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -702,6 +717,7 @@ async def test_dhcp_ip_update_ingnored_if_still_connected( protocol=DEFAULT_PROTOCOL, timeout=DEFAULT_TIMEOUT, aiohttp_get_session_callback=ANY, + bc_port=TEST_BC_PORT, ) assert expected_call in reolink_connect_class.call_args_list @@ -731,6 +747,7 @@ async def test_reconfig(hass: HomeAssistant, mock_setup_entry: MagicMock) -> Non CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, 
CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_BC_PORT: TEST_BC_PORT, }, options={ CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -777,6 +794,7 @@ async def test_reconfig_abort_unique_id_mismatch( CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_BC_PORT: TEST_BC_PORT, }, options={ CONF_PROTOCOL: DEFAULT_PROTOCOL, diff --git a/tests/components/reolink/test_init.py b/tests/components/reolink/test_init.py index 28d8c542f4f..4c4908dca6f 100644 --- a/tests/components/reolink/test_init.py +++ b/tests/components/reolink/test_init.py @@ -19,10 +19,14 @@ from homeassistant.components.reolink import ( FIRMWARE_UPDATE_INTERVAL, NUM_CRED_ERRORS, ) -from homeassistant.components.reolink.const import DOMAIN +from homeassistant.components.reolink.const import CONF_BC_PORT, DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ( + CONF_HOST, + CONF_PASSWORD, CONF_PORT, + CONF_PROTOCOL, + CONF_USERNAME, STATE_OFF, STATE_ON, STATE_UNAVAILABLE, @@ -35,16 +39,25 @@ from homeassistant.helpers import ( entity_registry as er, issue_registry as ir, ) +from homeassistant.helpers.device_registry import format_mac from homeassistant.setup import async_setup_component from .conftest import ( + CONF_SUPPORTS_PRIVACY_MODE, + CONF_USE_HTTPS, + DEFAULT_PROTOCOL, + TEST_BC_PORT, TEST_CAM_MODEL, + TEST_HOST, TEST_HOST_MODEL, TEST_MAC, TEST_NVR_NAME, TEST_PORT, + TEST_PRIVACY, TEST_UID, TEST_UID_CAM, + TEST_USE_HTTPS, + TEST_USERNAME, ) from tests.common import MockConfigEntry, async_fire_time_changed @@ -722,6 +735,41 @@ async def test_firmware_repair_issue( await hass.async_block_till_done() assert (DOMAIN, "firmware_update_host") in issue_registry.issues + reolink_connect.camera_sw_version_update_required.return_value = False + + +async def test_password_too_long_repair_issue( + hass: HomeAssistant, + reolink_connect: MagicMock, + issue_registry: ir.IssueRegistry, +) -> None: + """Test password too long issue is raised.""" + reolink_connect.valid_password.return_value = False + config_entry = MockConfigEntry( + domain=DOMAIN, + unique_id=format_mac(TEST_MAC), + data={ + CONF_HOST: TEST_HOST, + CONF_USERNAME: TEST_USERNAME, + CONF_PASSWORD: "too_longgggggggggggggggggggggggggggggggggggggggggggggggggg", + CONF_PORT: TEST_PORT, + CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_SUPPORTS_PRIVACY_MODE: TEST_PRIVACY, + }, + options={ + CONF_PROTOCOL: DEFAULT_PROTOCOL, + }, + title=TEST_NVR_NAME, + ) + config_entry.add_to_hass(hass) + assert not await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert ( + DOMAIN, + f"password_too_long_{config_entry.entry_id}", + ) in issue_registry.issues + reolink_connect.valid_password.return_value = True async def test_new_device_discovered( @@ -762,6 +810,21 @@ async def test_port_changed( assert config_entry.data[CONF_PORT] == 4567 +async def test_baichuan_port_changed( + hass: HomeAssistant, + reolink_connect: MagicMock, + config_entry: MockConfigEntry, +) -> None: + """Test config_entry baichuan port update when it has changed during initial login.""" + assert config_entry.data[CONF_BC_PORT] == TEST_BC_PORT + reolink_connect.baichuan.port = 8901 + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.data[CONF_BC_PORT] == 8901 + + async def test_privacy_mode_on( hass: HomeAssistant, freezer: FrozenDateTimeFactory, diff --git a/tests/components/reolink/test_media_source.py 
b/tests/components/reolink/test_media_source.py index a5a34514598..7044ea53671 100644 --- a/tests/components/reolink/test_media_source.py +++ b/tests/components/reolink/test_media_source.py @@ -15,7 +15,7 @@ from homeassistant.components.media_source import ( async_resolve_media, ) from homeassistant.components.reolink.config_flow import DEFAULT_PROTOCOL -from homeassistant.components.reolink.const import CONF_USE_HTTPS, DOMAIN +from homeassistant.components.reolink.const import CONF_BC_PORT, CONF_USE_HTTPS, DOMAIN from homeassistant.components.stream import DOMAIN as MEDIA_STREAM_DOMAIN from homeassistant.const import ( CONF_HOST, @@ -31,6 +31,7 @@ from homeassistant.helpers.device_registry import format_mac from homeassistant.setup import async_setup_component from .conftest import ( + TEST_BC_PORT, TEST_HOST2, TEST_HOST_MODEL, TEST_MAC2, @@ -348,6 +349,7 @@ async def test_browsing_not_loaded( CONF_PASSWORD: TEST_PASSWORD2, CONF_PORT: TEST_PORT, CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_BC_PORT: TEST_BC_PORT, }, options={ CONF_PROTOCOL: DEFAULT_PROTOCOL, diff --git a/tests/components/reolink/test_number.py b/tests/components/reolink/test_number.py index c6507fa36c1..dd70376d658 100644 --- a/tests/components/reolink/test_number.py +++ b/tests/components/reolink/test_number.py @@ -67,6 +67,48 @@ async def test_number( reolink_connect.set_volume.reset_mock(side_effect=True) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_smart_ai_number( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test number entity with smart ai sensitivity.""" + reolink_connect.baichuan.smart_ai_sensitivity.return_value = 80 + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.NUMBER]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.NUMBER}.{TEST_NVR_NAME}_AI_crossline_zone1_sensitivity" + + assert hass.states.get(entity_id).state == "80" + + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 50}, + blocking=True, + ) + reolink_connect.baichuan.set_smart_ai.assert_called_with( + 0, "crossline", 0, sensitivity=50 + ) + + reolink_connect.baichuan.set_smart_ai.side_effect = InvalidParameterError( + "Test error" + ) + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 50}, + blocking=True, + ) + + reolink_connect.baichuan.set_smart_ai.reset_mock(side_effect=True) + + async def test_host_number( hass: HomeAssistant, config_entry: MockConfigEntry, diff --git a/tests/components/reolink/test_select.py b/tests/components/reolink/test_select.py index 7910174380a..32bc5e4435e 100644 --- a/tests/components/reolink/test_select.py +++ b/tests/components/reolink/test_select.py @@ -104,6 +104,58 @@ async def test_play_quick_reply_message( reolink_connect.quick_reply_dict = MagicMock() +async def test_host_scene_select( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test host select entity with scene mode.""" + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SELECT]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED 
+ + entity_id = f"{Platform.SELECT}.{TEST_NVR_NAME}_scene_mode" + assert hass.states.get(entity_id).state == "off" + + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, "option": "home"}, + blocking=True, + ) + reolink_connect.baichuan.set_scene.assert_called_once() + + reolink_connect.baichuan.set_scene.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, "option": "home"}, + blocking=True, + ) + + reolink_connect.baichuan.set_scene.side_effect = InvalidParameterError("Test error") + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, "option": "home"}, + blocking=True, + ) + + reolink_connect.baichuan.active_scene = "Invalid value" + freezer.tick(DEVICE_UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get(entity_id).state == STATE_UNKNOWN + + reolink_connect.baichuan.set_scene.reset_mock(side_effect=True) + reolink_connect.baichuan.active_scene = "off" + + async def test_chime_select( hass: HomeAssistant, freezer: FrozenDateTimeFactory, diff --git a/tests/components/reolink/test_update.py b/tests/components/reolink/test_update.py index a6cfe862963..d48362516b8 100644 --- a/tests/components/reolink/test_update.py +++ b/tests/components/reolink/test_update.py @@ -6,7 +6,7 @@ from unittest.mock import MagicMock, patch from freezegun.api import FrozenDateTimeFactory import pytest -from reolink_aio.exceptions import ReolinkError +from reolink_aio.exceptions import ApiError, ReolinkError from reolink_aio.software_version import NewSoftwareVersion from homeassistant.components.reolink.update import POLL_AFTER_INSTALL, POLL_PROGRESS @@ -144,6 +144,17 @@ async def test_update_firm( blocking=True, ) + reolink_connect.update_firmware.side_effect = ApiError( + "Test error", translation_key="firmware_rate_limit" + ) + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + UPDATE_DOMAIN, + SERVICE_INSTALL, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + # test _async_update_future reolink_connect.camera_sw_version.return_value = "v3.3.0.226_23031644" reolink_connect.firmware_update_available.return_value = False diff --git a/tests/components/reolink/test_util.py b/tests/components/reolink/test_util.py index f66f4682b98..ef66d471801 100644 --- a/tests/components/reolink/test_util.py +++ b/tests/components/reolink/test_util.py @@ -38,51 +38,59 @@ from tests.common import MockConfigEntry [ ( ApiError("Test error"), - HomeAssistantError, + HomeAssistantError(translation_key="api_error"), + ), + ( + ApiError("Test error", translation_key="firmware_rate_limit"), + HomeAssistantError(translation_key="firmware_rate_limit"), + ), + ( + ApiError("Test error", translation_key="not_in_strings.json"), + HomeAssistantError(translation_key="api_error"), ), ( CredentialsInvalidError("Test error"), - HomeAssistantError, + HomeAssistantError(translation_key="invalid_credentials"), ), ( InvalidContentTypeError("Test error"), - HomeAssistantError, + HomeAssistantError(translation_key="invalid_content_type"), ), ( InvalidParameterError("Test error"), - ServiceValidationError, + ServiceValidationError(translation_key="invalid_parameter"), ), ( LoginError("Test error"), - HomeAssistantError, + HomeAssistantError(translation_key="login_error"), ), ( 
NoDataError("Test error"), - HomeAssistantError, + HomeAssistantError(translation_key="no_data"), ), ( NotSupportedError("Test error"), - HomeAssistantError, + HomeAssistantError(translation_key="not_supported"), ), ( ReolinkConnectionError("Test error"), - HomeAssistantError, + HomeAssistantError(translation_key="connection_error"), ), ( ReolinkError("Test error"), - HomeAssistantError, + HomeAssistantError(translation_key="unexpected"), ), ( ReolinkTimeoutError("Test error"), - HomeAssistantError, + HomeAssistantError(translation_key="timeout"), ), ( SubscriptionError("Test error"), - HomeAssistantError, + HomeAssistantError(translation_key="subscription_error"), ), ( UnexpectedDataError("Test error"), - HomeAssistantError, + HomeAssistantError(translation_key="unexpected_data"), ), ], ) @@ -91,7 +99,7 @@ async def test_try_function( config_entry: MockConfigEntry, reolink_connect: MagicMock, side_effect: ReolinkError, - expected: Exception, + expected: HomeAssistantError, ) -> None: """Test try_function error translations using number entity.""" reolink_connect.volume.return_value = 80 @@ -104,7 +112,7 @@ async def test_try_function( entity_id = f"{Platform.NUMBER}.{TEST_NVR_NAME}_volume" reolink_connect.set_volume.side_effect = side_effect - with pytest.raises(expected): + with pytest.raises(expected.__class__) as err: await hass.services.async_call( NUMBER_DOMAIN, SERVICE_SET_VALUE, @@ -112,4 +120,6 @@ async def test_try_function( blocking=True, ) + assert err.value.translation_key == expected.translation_key + reolink_connect.set_volume.reset_mock(side_effect=True) diff --git a/tests/components/reolink/test_views.py b/tests/components/reolink/test_views.py index c994cc59c5d..3521de072b6 100644 --- a/tests/components/reolink/test_views.py +++ b/tests/components/reolink/test_views.py @@ -46,8 +46,12 @@ def get_mock_session( mock_response = Mock() mock_response.content_length = content_length + mock_response.headers = {} + mock_response.status = 200 + mock_response.reason = "OK" mock_response.content_type = content_type mock_response.content.iter_chunked = Mock(return_value=content) + mock_response.text = AsyncMock(return_value="test") mock_session = Mock() mock_session.get = AsyncMock(return_value=mock_response) @@ -178,16 +182,18 @@ async def test_playback_proxy_timeout( assert response.status == 200 +@pytest.mark.parametrize(("content_type"), [("video/x-flv"), ("text/html")]) async def test_playback_wrong_content( hass: HomeAssistant, reolink_connect: MagicMock, config_entry: MockConfigEntry, hass_client: ClientSessionGenerator, + content_type: str, ) -> None: """Test playback proxy URL with a wrong content type in the response.""" reolink_connect.get_vod_source.return_value = (TEST_MIME_TYPE_MP4, TEST_URL) - mock_session = get_mock_session(content_type="video/x-flv") + mock_session = get_mock_session(content_type=content_type) with patch( "homeassistant.components.reolink.views.async_get_clientsession", diff --git a/tests/components/roborock/conftest.py b/tests/components/roborock/conftest.py index 9b3a6633c62..1ec2b00263f 100644 --- a/tests/components/roborock/conftest.py +++ b/tests/components/roborock/conftest.py @@ -3,10 +3,9 @@ from collections.abc import Generator from copy import deepcopy import pathlib -import shutil +import tempfile from typing import Any from unittest.mock import Mock, patch -import uuid import pytest from roborock import RoborockCategory, RoomMapping @@ -19,7 +18,6 @@ from homeassistant.components.roborock.const import ( CONF_USER_DATA, DOMAIN, ) -from 
homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_USERNAME, Platform from homeassistant.core import HomeAssistant @@ -80,6 +78,9 @@ def bypass_api_client_fixture() -> None: "homeassistant.components.roborock.RoborockApiClient.get_scenes", return_value=SCENES, ), + patch( + "homeassistant.components.roborock.coordinator.RoborockLocalClientV1.load_multi_map" + ), ): yield @@ -110,7 +111,7 @@ def bypass_api_fixture(bypass_api_client_fixture: Any) -> None: return_value=MULTI_MAP_LIST, ), patch( - "homeassistant.components.roborock.image.RoborockMapDataParser.parse", + "homeassistant.components.roborock.coordinator.RoborockMapDataParser.parse", return_value=MAP_DATA, ), patch( @@ -127,7 +128,7 @@ def bypass_api_fixture(bypass_api_client_fixture: Any) -> None: "roborock.version_1_apis.AttributeCache.value", ), patch( - "homeassistant.components.roborock.image.MAP_SLEEP", + "homeassistant.components.roborock.coordinator.MAP_SLEEP", 0, ), patch( @@ -215,7 +216,6 @@ async def setup_entry( hass: HomeAssistant, bypass_api_fixture, mock_roborock_entry: MockConfigEntry, - cleanup_map_storage: pathlib.Path, platforms: list[Platform], ) -> Generator[MockConfigEntry]: """Set up the Roborock platform.""" @@ -225,21 +225,18 @@ async def setup_entry( yield mock_roborock_entry -@pytest.fixture -async def cleanup_map_storage( - hass: HomeAssistant, mock_roborock_entry: MockConfigEntry +@pytest.fixture(autouse=True, name="storage_path") +async def storage_path_fixture( + hass: HomeAssistant, ) -> Generator[pathlib.Path]: """Test cleanup, remove any map storage persisted during the test.""" - tmp_path = str(uuid.uuid4()) - with patch( - "homeassistant.components.roborock.roborock_storage.STORAGE_PATH", new=tmp_path - ): - storage_path = ( - pathlib.Path(hass.config.path(tmp_path)) / mock_roborock_entry.entry_id - ) - yield storage_path - # We need to first unload the config entry because unloading it will - # persist any unsaved maps to storage. 
- if mock_roborock_entry.state is ConfigEntryState.LOADED: - await hass.config_entries.async_unload(mock_roborock_entry.entry_id) - shutil.rmtree(str(storage_path), ignore_errors=True) + with tempfile.TemporaryDirectory() as tmp_path: + + def get_storage_path(_: HomeAssistant, entry_id: str) -> pathlib.Path: + return pathlib.Path(tmp_path) / entry_id + + with patch( + "homeassistant.components.roborock.roborock_storage._storage_path_prefix", + new=get_storage_path, + ): + yield pathlib.Path(tmp_path) diff --git a/tests/components/roborock/mock_data.py b/tests/components/roborock/mock_data.py index 59c54892687..82b51e67f8d 100644 --- a/tests/components/roborock/mock_data.py +++ b/tests/components/roborock/mock_data.py @@ -1120,7 +1120,10 @@ PROP = DeviceProp( ) NETWORK_INFO = NetworkInfo( - ip="123.232.12.1", ssid="wifi", mac="ac:cc:cc:cc:cc", bssid="bssid", rssi=90 + ip="123.232.12.1", ssid="wifi", mac="ac:cc:cc:cc:cc:cc", bssid="bssid", rssi=90 +) +NETWORK_INFO_2 = NetworkInfo( + ip="123.232.12.2", ssid="wifi", mac="ac:cc:cc:cc:cd:cc", bssid="bssid", rssi=90 ) MULTI_MAP_LIST = MultiMapsList.from_dict( @@ -1151,6 +1154,7 @@ MAP_DATA = MapData(0, 0) MAP_DATA.image = ImageData( 100, 10, 10, 10, 10, ImageConfig(), Image.new("RGB", (1, 1)), lambda p: p ) +MAP_DATA.vacuum_room = 17 SCENES = [ diff --git a/tests/components/roborock/test_config_flow.py b/tests/components/roborock/test_config_flow.py index 13bc23e6e2b..441974dc15d 100644 --- a/tests/components/roborock/test_config_flow.py +++ b/tests/components/roborock/test_config_flow.py @@ -19,12 +19,19 @@ from homeassistant.components.roborock.const import CONF_ENTRY_CODE, DOMAIN, DRA from homeassistant.const import CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo -from .mock_data import MOCK_CONFIG, USER_DATA, USER_EMAIL +from .mock_data import MOCK_CONFIG, NETWORK_INFO, USER_DATA, USER_EMAIL from tests.common import MockConfigEntry +@pytest.fixture +def cleanup_map_storage(): + """Override the map storage fixture as it is not relevant here.""" + return + + async def test_config_flow_success( hass: HomeAssistant, bypass_api_fixture, @@ -189,25 +196,31 @@ async def test_config_flow_failures_code_login( async def test_options_flow_drawables( - hass: HomeAssistant, setup_entry: MockConfigEntry + hass: HomeAssistant, mock_roborock_entry: MockConfigEntry ) -> None: """Test that the options flow works.""" - result = await hass.config_entries.options.async_init(setup_entry.entry_id) - - assert result["type"] == FlowResultType.FORM - assert result["step_id"] == DRAWABLES - with patch( - "homeassistant.components.roborock.async_setup_entry", return_value=True - ) as mock_setup: - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={Drawable.PREDICTED_PATH: True}, - ) + with patch("homeassistant.components.roborock.roborock_storage"): + await hass.config_entries.async_setup(mock_roborock_entry.entry_id) await hass.async_block_till_done() - assert result["type"] == FlowResultType.CREATE_ENTRY - assert setup_entry.options[DRAWABLES][Drawable.PREDICTED_PATH] is True - assert len(mock_setup.mock_calls) == 1 + result = await hass.config_entries.options.async_init( + mock_roborock_entry.entry_id + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == DRAWABLES + with patch( + "homeassistant.components.roborock.async_setup_entry", return_value=True + ) as 
mock_setup: + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={Drawable.PREDICTED_PATH: True}, + ) + await hass.async_block_till_done() + + assert result["type"] == FlowResultType.CREATE_ENTRY + assert mock_roborock_entry.options[DRAWABLES][Drawable.PREDICTED_PATH] is True + assert len(mock_setup.mock_calls) == 1 async def test_reauth_flow( @@ -269,3 +282,67 @@ async def test_account_already_configured( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured_account" + + +async def test_discovery_not_setup( + hass: HomeAssistant, + bypass_api_fixture, +) -> None: + """Handle the config flow and make sure it succeeds.""" + with ( + patch("homeassistant.components.roborock.async_setup_entry", return_value=True), + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_DHCP}, + data=DhcpServiceInfo( + ip=NETWORK_INFO.ip, + macaddress=NETWORK_INFO.mac.replace(":", ""), + hostname="roborock-vacuum-a72", + ), + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + with patch( + "homeassistant.components.roborock.config_flow.RoborockApiClient.request_code" + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_USERNAME: USER_EMAIL} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "code" + assert result["errors"] == {} + with patch( + "homeassistant.components.roborock.config_flow.RoborockApiClient.code_login", + return_value=USER_DATA, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_ENTRY_CODE: "123456"} + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == USER_EMAIL + assert result["data"] == MOCK_CONFIG + assert result["result"] + + +async def test_discovery_already_setup( + hass: HomeAssistant, + bypass_api_fixture, + mock_roborock_entry: MockConfigEntry, +) -> None: + """Handle aborting if the device is already setup.""" + await hass.config_entries.async_setup(mock_roborock_entry.entry_id) + await hass.async_block_till_done() + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_DHCP}, + data=DhcpServiceInfo( + ip=NETWORK_INFO.ip, + macaddress=NETWORK_INFO.mac.replace(":", ""), + hostname="roborock-vacuum-a72", + ), + ) + + assert result["type"] is FlowResultType.ABORT diff --git a/tests/components/roborock/test_coordinator.py b/tests/components/roborock/test_coordinator.py new file mode 100644 index 00000000000..94976ba92f5 --- /dev/null +++ b/tests/components/roborock/test_coordinator.py @@ -0,0 +1,107 @@ +"""Test Roborock Coordinator specific logic.""" + +import copy +from datetime import timedelta +from unittest.mock import patch + +import pytest +from roborock.exceptions import RoborockException + +from homeassistant.components.roborock.const import ( + V1_CLOUD_IN_CLEANING_INTERVAL, + V1_CLOUD_NOT_CLEANING_INTERVAL, + V1_LOCAL_IN_CLEANING_INTERVAL, + V1_LOCAL_NOT_CLEANING_INTERVAL, +) +from homeassistant.core import HomeAssistant +from homeassistant.util import dt as dt_util + +from .mock_data import PROP + +from tests.common import MockConfigEntry, async_fire_time_changed + + +@pytest.mark.parametrize( + ("interval", "in_cleaning"), + [ + (V1_CLOUD_IN_CLEANING_INTERVAL, 1), + (V1_CLOUD_NOT_CLEANING_INTERVAL, 0), + ], +) +async def test_dynamic_cloud_scan_interval( + hass: HomeAssistant, + 
mock_roborock_entry: MockConfigEntry, + bypass_api_fixture_v1_only, + interval: timedelta, + in_cleaning: int, +) -> None: + """Test dynamic scan interval.""" + prop = copy.deepcopy(PROP) + prop.status.in_cleaning = in_cleaning + with ( + # Force the system to use the cloud api. + patch( + "homeassistant.components.roborock.coordinator.RoborockLocalClientV1.ping", + side_effect=RoborockException(), + ), + patch( + "homeassistant.components.roborock.RoborockMqttClientV1.get_prop", + return_value=prop, + ), + ): + await hass.config_entries.async_setup(mock_roborock_entry.entry_id) + assert hass.states.get("sensor.roborock_s7_maxv_battery").state == "100" + prop = copy.deepcopy(prop) + prop.status.battery = 20 + with patch( + "homeassistant.components.roborock.RoborockMqttClientV1.get_prop", + return_value=prop, + ): + async_fire_time_changed( + hass, dt_util.utcnow() + interval - timedelta(seconds=5) + ) + assert hass.states.get("sensor.roborock_s7_maxv_battery").state == "100" + async_fire_time_changed(hass, dt_util.utcnow() + interval) + + assert hass.states.get("sensor.roborock_s7_maxv_battery").state == "20" + + +@pytest.mark.parametrize( + ("interval", "in_cleaning"), + [ + (V1_LOCAL_IN_CLEANING_INTERVAL, 1), + (V1_LOCAL_NOT_CLEANING_INTERVAL, 0), + ], +) +async def test_dynamic_local_scan_interval( + hass: HomeAssistant, + mock_roborock_entry: MockConfigEntry, + bypass_api_fixture_v1_only, + interval: timedelta, + in_cleaning: int, +) -> None: + """Test dynamic scan interval.""" + prop = copy.deepcopy(PROP) + prop.status.in_cleaning = in_cleaning + with ( + patch( + "homeassistant.components.roborock.coordinator.RoborockLocalClientV1.get_prop", + return_value=prop, + ), + ): + await hass.config_entries.async_setup(mock_roborock_entry.entry_id) + assert hass.states.get("sensor.roborock_s7_maxv_battery").state == "100" + prop = copy.deepcopy(prop) + prop.status.battery = 20 + with patch( + "homeassistant.components.roborock.coordinator.RoborockLocalClientV1.get_prop", + return_value=prop, + ): + async_fire_time_changed( + hass, dt_util.utcnow() + interval - timedelta(seconds=5) + ) + assert hass.states.get("sensor.roborock_s7_maxv_battery").state == "100" + + async_fire_time_changed(hass, dt_util.utcnow() + interval) + + assert hass.states.get("sensor.roborock_s7_maxv_battery").state == "20" diff --git a/tests/components/roborock/test_image.py b/tests/components/roborock/test_image.py index fd6c8b2796a..b7c811e0ce2 100644 --- a/tests/components/roborock/test_image.py +++ b/tests/components/roborock/test_image.py @@ -3,7 +3,6 @@ import copy from datetime import timedelta from http import HTTPStatus -import io from unittest.mock import patch from PIL import Image @@ -12,6 +11,7 @@ from roborock import RoborockException from vacuum_map_parser_base.map_data import ImageConfig, ImageData from homeassistant.components.roborock import DOMAIN +from homeassistant.components.roborock.const import V1_LOCAL_NOT_CLEANING_INTERVAL from homeassistant.config_entries import ConfigEntryState from homeassistant.const import Platform from homeassistant.core import HomeAssistant @@ -63,20 +63,26 @@ async def test_floorplan_image( return_value=prop, ), patch( - "homeassistant.components.roborock.image.dt_util.utcnow", return_value=now + "homeassistant.components.roborock.coordinator.dt_util.utcnow", + return_value=now, ), patch( - "homeassistant.components.roborock.image.RoborockMapDataParser.parse", - return_value=new_map_data, + "homeassistant.components.roborock.coordinator.RoborockMapDataParser.parse", + 
return_value=MAP_DATA, ) as parse_map, ): + # This should call parse_map twice as the both devices are in cleaning. async_fire_time_changed(hass, now) - await hass.async_block_till_done() resp = await client.get("/api/image_proxy/image.roborock_s7_maxv_upstairs") + assert resp.status == HTTPStatus.OK + resp = await client.get("/api/image_proxy/image.roborock_s7_2_upstairs") + assert resp.status == HTTPStatus.OK + resp = await client.get("/api/image_proxy/image.roborock_s7_maxv_downstairs") assert resp.status == HTTPStatus.OK body = await resp.read() assert body is not None - assert parse_map.call_count == 1 + + assert parse_map.call_count == 2 async def test_floorplan_image_failed_parse( @@ -92,10 +98,11 @@ async def test_floorplan_image_failed_parse( # Copy the device prop so we don't override it prop = copy.deepcopy(PROP) prop.status.in_cleaning = 1 + previous_state = hass.states.get("image.roborock_s7_maxv_upstairs").state # Update image, but get none for parse image. with ( patch( - "homeassistant.components.roborock.image.RoborockMapDataParser.parse", + "homeassistant.components.roborock.coordinator.RoborockMapDataParser.parse", return_value=map_data, ), patch( @@ -103,45 +110,16 @@ async def test_floorplan_image_failed_parse( return_value=prop, ), patch( - "homeassistant.components.roborock.image.dt_util.utcnow", return_value=now + "homeassistant.components.roborock.coordinator.dt_util.utcnow", + return_value=now, ), ): async_fire_time_changed(hass, now) resp = await client.get("/api/image_proxy/image.roborock_s7_maxv_upstairs") - assert not resp.ok - - -async def test_load_stored_image( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - setup_entry: MockConfigEntry, -) -> None: - """Test that we correctly load an image from storage when it already exists.""" - img_byte_arr = io.BytesIO() - MAP_DATA.image.data.save(img_byte_arr, format="PNG") - img_bytes = img_byte_arr.getvalue() - - # Load the image on demand, which should queue it to be cached on disk - client = await hass_client() - resp = await client.get("/api/image_proxy/image.roborock_s7_maxv_upstairs") - assert resp.status == HTTPStatus.OK - - with patch( - "homeassistant.components.roborock.image.RoborockMapDataParser.parse", - ) as parse_map: - # Reload the config entry so that the map is saved in storage and entities exist. - await hass.config_entries.async_reload(setup_entry.entry_id) - await hass.async_block_till_done() - assert hass.states.get("image.roborock_s7_maxv_upstairs") is not None - client = await hass_client() - resp = await client.get("/api/image_proxy/image.roborock_s7_maxv_upstairs") - # Test that we can get the image and it correctly serialized and unserialized. - assert resp.status == HTTPStatus.OK - body = await resp.read() - assert body == img_bytes - - # Ensure that we never tried to update the map, and only used the cached image. - assert parse_map.call_count == 0 + # The map should load fine from the coordinator, but it should not update the + # last_updated timestamp. 
+ assert resp.ok + assert previous_state == hass.states.get("image.roborock_s7_maxv_upstairs").state async def test_fail_to_save_image( @@ -181,9 +159,6 @@ async def test_fail_to_load_image( ) -> None: """Test that we gracefully handle failing to load an image.""" with ( - patch( - "homeassistant.components.roborock.image.RoborockMapDataParser.parse", - ) as parse_map, patch( "homeassistant.components.roborock.roborock_storage.Path.exists", return_value=True, @@ -192,13 +167,14 @@ async def test_fail_to_load_image( "homeassistant.components.roborock.roborock_storage.Path.read_bytes", side_effect=OSError, ) as read_bytes, + patch( + "homeassistant.components.roborock.coordinator.RoborockDataUpdateCoordinator.refresh_coordinator_map" + ), ): # Reload the config entry so that the map is saved in storage and entities exist. await hass.config_entries.async_reload(setup_entry.entry_id) await hass.async_block_till_done() assert read_bytes.call_count == 4 - # Ensure that we never updated the map manually since we couldn't load it. - assert parse_map.call_count == 0 assert "Unable to read map file" in caplog.text @@ -212,7 +188,7 @@ async def test_fail_parse_on_startup( map_data = copy.deepcopy(MAP_DATA) map_data.image = None with patch( - "homeassistant.components.roborock.image.RoborockMapDataParser.parse", + "homeassistant.components.roborock.coordinator.RoborockMapDataParser.parse", return_value=map_data, ): await async_setup_component(hass, DOMAIN, {}) @@ -258,9 +234,10 @@ async def test_fail_updating_image( prop = copy.deepcopy(PROP) prop.status.in_cleaning = 1 # Update image, but get none for parse image. + previous_state = hass.states.get("image.roborock_s7_maxv_upstairs").state with ( patch( - "homeassistant.components.roborock.image.RoborockMapDataParser.parse", + "homeassistant.components.roborock.coordinator.RoborockMapDataParser.parse", return_value=map_data, ), patch( @@ -268,7 +245,8 @@ async def test_fail_updating_image( return_value=prop, ), patch( - "homeassistant.components.roborock.image.dt_util.utcnow", return_value=now + "homeassistant.components.roborock.coordinator.dt_util.utcnow", + return_value=now, ), patch( "homeassistant.components.roborock.coordinator.RoborockMqttClientV1.get_map_v1", @@ -277,4 +255,91 @@ async def test_fail_updating_image( ): async_fire_time_changed(hass, now) resp = await client.get("/api/image_proxy/image.roborock_s7_maxv_upstairs") - assert not resp.ok + # The map should load fine from the coordinator, but it should not update the + # last_updated timestamp. + assert resp.ok + assert previous_state == hass.states.get("image.roborock_s7_maxv_upstairs").state + + +async def test_index_error_map( + hass: HomeAssistant, + setup_entry: MockConfigEntry, + hass_client: ClientSessionGenerator, +) -> None: + """Test that we handle failing getting the image after it has already been setup with a indexerror.""" + client = await hass_client() + now = dt_util.utcnow() + timedelta(seconds=91) + # Copy the device prop so we don't override it + prop = copy.deepcopy(PROP) + prop.status.in_cleaning = 1 + previous_state = hass.states.get("image.roborock_s7_maxv_upstairs").state + # Update image, but get IndexError for image. 
+    with (
+        patch(
+            "homeassistant.components.roborock.coordinator.RoborockMapDataParser.parse",
+            side_effect=IndexError,
+        ),
+        patch(
+            "homeassistant.components.roborock.coordinator.RoborockLocalClientV1.get_prop",
+            return_value=prop,
+        ),
+        patch(
+            "homeassistant.components.roborock.coordinator.dt_util.utcnow",
+            return_value=now,
+        ),
+    ):
+        async_fire_time_changed(hass, now)
+        resp = await client.get("/api/image_proxy/image.roborock_s7_maxv_upstairs")
+    # The map should load fine from the coordinator, but it should not update the
+    # last_updated timestamp.
+    assert resp.ok
+    assert previous_state == hass.states.get("image.roborock_s7_maxv_upstairs").state
+
+
+async def test_map_status_change(
+    hass: HomeAssistant,
+    setup_entry: MockConfigEntry,
+    hass_client: ClientSessionGenerator,
+) -> None:
+    """Test floor plan map image is correctly updated on status change."""
+    assert len(hass.states.async_all("image")) == 4
+
+    assert hass.states.get("image.roborock_s7_maxv_upstairs") is not None
+    client = await hass_client()
+    resp = await client.get("/api/image_proxy/image.roborock_s7_maxv_upstairs")
+    assert resp.status == HTTPStatus.OK
+    old_body = await resp.read()
+    assert old_body[0:4] == b"\x89PNG"
+
+    # Call a second time. This interval does not directly trigger a map update, but does
+    # trigger a status update which detects the state has changed and updates the map.
+    now = dt_util.utcnow() + V1_LOCAL_NOT_CLEANING_INTERVAL
+
+    # Copy the device prop so we don't override it
+    prop = copy.deepcopy(PROP)
+    prop.status.state_name = "testing"
+    new_map_data = copy.deepcopy(MAP_DATA)
+    new_map_data.image = ImageData(
+        100, 10, 10, 10, 10, ImageConfig(), Image.new("RGB", (2, 2)), lambda p: p
+    )
+    with (
+        patch(
+            "homeassistant.components.roborock.coordinator.RoborockLocalClientV1.get_prop",
+            return_value=prop,
+        ),
+        patch(
+            "homeassistant.components.roborock.coordinator.dt_util.utcnow",
+            return_value=now,
+        ),
+        patch(
+            "homeassistant.components.roborock.coordinator.RoborockMapDataParser.parse",
+            return_value=new_map_data,
+        ),
+    ):
+        async_fire_time_changed(hass, now)
+        resp = await client.get("/api/image_proxy/image.roborock_s7_maxv_upstairs")
+    assert resp.status == HTTPStatus.OK
+    body = await resp.read()
+    assert body is not None
+    assert body != old_body
diff --git a/tests/components/roborock/test_init.py b/tests/components/roborock/test_init.py
index 904a3af89d6..983e3d083f4 100644
--- a/tests/components/roborock/test_init.py
+++ b/tests/components/roborock/test_init.py
@@ -17,9 +17,10 @@
 from homeassistant.components.roborock.const import DOMAIN
 from homeassistant.config_entries import ConfigEntryState
 from homeassistant.const import Platform
 from homeassistant.core import HomeAssistant
+from homeassistant.helpers.device_registry import DeviceRegistry
 from homeassistant.setup import async_setup_component
-from .mock_data import HOME_DATA
+from .mock_data import HOME_DATA, NETWORK_INFO, NETWORK_INFO_2
 from tests.common import MockConfigEntry
 from tests.typing import ClientSessionGenerator
@@ -173,7 +174,7 @@ async def test_remove_from_hass(
     bypass_api_fixture,
     setup_entry: MockConfigEntry,
     hass_client: ClientSessionGenerator,
-    cleanup_map_storage: pathlib.Path,
+    storage_path: pathlib.Path,
 ) -> None:
     """Test that removing from hass removes any existing images."""
     client = await hass_client()
     resp = await client.get("/api/image_proxy/image.roborock_s7_maxv_upstairs")
     assert resp.status == HTTPStatus.OK
-    assert not cleanup_map_storage.exists()
+    config_entry_storage = storage_path / setup_entry.entry_id
+    assert not config_entry_storage.exists()
     # Flush to disk
     await hass.config_entries.async_unload(setup_entry.entry_id)
-    assert cleanup_map_storage.exists()
-    paths = list(cleanup_map_storage.walk())
-    assert len(paths) == 3  # One map image and two directories
+    assert config_entry_storage.exists()
+    paths = list(config_entry_storage.walk())
+    assert len(paths) == 4  # Two map images and two directories
     await hass.config_entries.async_remove(setup_entry.entry_id)
     # After removal, directories should be empty.
-    assert not cleanup_map_storage.exists()
+    assert not config_entry_storage.exists()
@@ -201,7 +203,7 @@ async def test_oserror_remove_image(
     hass: HomeAssistant,
     bypass_api_fixture,
     setup_entry: MockConfigEntry,
-    cleanup_map_storage: pathlib.Path,
+    storage_path: pathlib.Path,
     hass_client: ClientSessionGenerator,
     caplog: pytest.LogCaptureFixture,
 ) -> None:
@@ -214,12 +216,13 @@ async def test_oserror_remove_image(
     assert resp.status == HTTPStatus.OK
     # Image content is saved when unloading
-    assert not cleanup_map_storage.exists()
+    config_entry_storage = storage_path / setup_entry.entry_id
+    assert not config_entry_storage.exists()
     await hass.config_entries.async_unload(setup_entry.entry_id)
-    assert cleanup_map_storage.exists()
-    paths = list(cleanup_map_storage.walk())
-    assert len(paths) == 3  # One map image and two directories
+    assert config_entry_storage.exists()
+    paths = list(config_entry_storage.walk())
+    assert len(paths) == 4  # Two map images and two directories
     with patch(
         "homeassistant.components.roborock.roborock_storage.shutil.rmtree",
@@ -242,7 +245,7 @@ async def test_not_supported_protocol(
         "homeassistant.components.roborock.RoborockApiClient.get_home_data_v2",
         return_value=home_data_copy,
     ):
-        await async_setup_component(hass, DOMAIN, {})
+        await hass.config_entries.async_setup(mock_roborock_entry.entry_id)
         await hass.async_block_till_done()
     assert "because its protocol version random" in caplog.text
@@ -295,3 +298,74 @@ async def test_no_user_agreement(
     await hass.config_entries.async_setup(mock_roborock_entry.entry_id)
     assert mock_roborock_entry.state is ConfigEntryState.SETUP_RETRY
     assert mock_roborock_entry.error_reason_translation_key == "no_user_agreement"
+
+
+async def test_stale_device(
+    hass: HomeAssistant,
+    bypass_api_fixture,
+    mock_roborock_entry: MockConfigEntry,
+    device_registry: DeviceRegistry,
+) -> None:
+    """Test that we remove a device if it is no longer given by home_data."""
+    with patch(
+        "homeassistant.components.roborock.RoborockMqttClientV1.get_networking",
+        side_effect=[NETWORK_INFO, NETWORK_INFO_2],
+    ):
+        await hass.config_entries.async_setup(mock_roborock_entry.entry_id)
+    assert mock_roborock_entry.state is ConfigEntryState.LOADED
+    existing_devices = device_registry.devices.get_devices_for_config_entry_id(
+        mock_roborock_entry.entry_id
+    )
+    assert len(existing_devices) == 6  # 2 for each robot, 1 for A01, 1 for Zeo
+    hd = deepcopy(HOME_DATA)
+    hd.devices = [hd.devices[0]]
+
+    with (
+        patch(
+            "homeassistant.components.roborock.RoborockApiClient.get_home_data_v2",
+            return_value=hd,
+        ),
+        patch(
+            "homeassistant.components.roborock.RoborockMqttClientV1.get_networking",
+            side_effect=[NETWORK_INFO, NETWORK_INFO_2],
+        ),
+    ):
+        await hass.config_entries.async_reload(mock_roborock_entry.entry_id)
+        await hass.async_block_till_done()
+    new_devices = device_registry.devices.get_devices_for_config_entry_id(
+        mock_roborock_entry.entry_id
+    )
+    assert (
+        len(new_devices) == 4
+    )  # 2 for the one remaining robot. 1 for both the A01s which are shared and
+    # therefore not deleted.
+
+
+async def test_no_stale_device(
+    hass: HomeAssistant,
+    bypass_api_fixture,
+    mock_roborock_entry: MockConfigEntry,
+    device_registry: DeviceRegistry,
+) -> None:
+    """Test that we don't remove a device if it fails to set up."""
+    with patch(
+        "homeassistant.components.roborock.RoborockMqttClientV1.get_networking",
+        side_effect=[NETWORK_INFO, NETWORK_INFO_2],
+    ):
+        await hass.config_entries.async_setup(mock_roborock_entry.entry_id)
+    assert mock_roborock_entry.state is ConfigEntryState.LOADED
+    existing_devices = device_registry.devices.get_devices_for_config_entry_id(
+        mock_roborock_entry.entry_id
+    )
+    assert len(existing_devices) == 6  # 2 for each robot, 1 for A01, 1 for Zeo
+
+    with patch(
+        "homeassistant.components.roborock.RoborockMqttClientV1.get_networking",
+        side_effect=[NETWORK_INFO, RoborockException],
+    ):
+        await hass.config_entries.async_reload(mock_roborock_entry.entry_id)
+        await hass.async_block_till_done()
+    new_devices = device_registry.devices.get_devices_for_config_entry_id(
+        mock_roborock_entry.entry_id
+    )
+    assert len(new_devices) == 6  # 2 for each robot, 1 for A01, 1 for Zeo
diff --git a/tests/components/roborock/test_sensor.py b/tests/components/roborock/test_sensor.py
index e33d3aa78d5..719b398de94 100644
--- a/tests/components/roborock/test_sensor.py
+++ b/tests/components/roborock/test_sensor.py
@@ -29,7 +29,7 @@ def platforms() -> list[Platform]:
 async def test_sensors(hass: HomeAssistant, setup_entry: MockConfigEntry) -> None:
     """Test sensors and check test values are correctly set."""
-    assert len(hass.states.async_all("sensor")) == 40
+    assert len(hass.states.async_all("sensor")) == 42
     assert hass.states.get("sensor.roborock_s7_maxv_main_brush_time_left").state == str(
         MAIN_BRUSH_REPLACE_TIME - 74382
     )
@@ -53,7 +53,7 @@ async def test_sensors(hass: HomeAssistant, setup_entry: MockConfigEntry) -> Non
     assert hass.states.get("sensor.roborock_s7_maxv_cleaning_area").state == "21.0"
     assert hass.states.get("sensor.roborock_s7_maxv_vacuum_error").state == "none"
     assert hass.states.get("sensor.roborock_s7_maxv_battery").state == "100"
-    assert hass.states.get("sensor.roborock_s7_maxv_dock_error").state == "ok"
+    assert hass.states.get("sensor.roborock_s7_maxv_dock_dock_error").state == "ok"
     assert hass.states.get("sensor.roborock_s7_maxv_total_cleaning_count").state == "31"
     assert (
         hass.states.get("sensor.roborock_s7_maxv_last_clean_begin").state
@@ -63,6 +63,10 @@ async def test_sensors(hass: HomeAssistant, setup_entry: MockConfigEntry) -> Non
         hass.states.get("sensor.roborock_s7_maxv_last_clean_end").state
         == "2023-01-01T03:43:58+00:00"
     )
+    assert (
+        hass.states.get("sensor.roborock_s7_maxv_current_room").state
+        == "Example room 2"
+    )
     assert hass.states.get("sensor.dyad_pro_status").state == "drying"
     assert hass.states.get("sensor.dyad_pro_battery").state == "100"
     assert hass.states.get("sensor.dyad_pro_filter_time_left").state == "111"
diff --git a/tests/components/roborock/test_switch.py b/tests/components/roborock/test_switch.py
index e2df9a3498f..120c4fc4860 100644
--- a/tests/components/roborock/test_switch.py
+++ b/tests/components/roborock/test_switch.py
@@ -22,8 +22,8 @@ def platforms() -> list[Platform]:
 @pytest.mark.parametrize(
     ("entity_id"),
     [
-        ("switch.roborock_s7_maxv_child_lock"),
-
("switch.roborock_s7_maxv_status_indicator_light"), + ("switch.roborock_s7_maxv_dock_child_lock"), + ("switch.roborock_s7_maxv_dock_status_indicator_light"), ("switch.roborock_s7_maxv_do_not_disturb"), ], ) @@ -59,8 +59,8 @@ async def test_update_success( @pytest.mark.parametrize( ("entity_id", "service"), [ - ("switch.roborock_s7_maxv_status_indicator_light", SERVICE_TURN_ON), - ("switch.roborock_s7_maxv_status_indicator_light", SERVICE_TURN_OFF), + ("switch.roborock_s7_maxv_dock_status_indicator_light", SERVICE_TURN_ON), + ("switch.roborock_s7_maxv_dock_status_indicator_light", SERVICE_TURN_OFF), ], ) @pytest.mark.parametrize( diff --git a/tests/components/roborock/test_vacuum.py b/tests/components/roborock/test_vacuum.py index 15fdeb4767c..5d6e7a599bd 100644 --- a/tests/components/roborock/test_vacuum.py +++ b/tests/components/roborock/test_vacuum.py @@ -261,7 +261,7 @@ async def test_get_current_position( return_value=b"", ), patch( - "homeassistant.components.roborock.image.RoborockMapDataParser.parse", + "homeassistant.components.roborock.coordinator.RoborockMapDataParser.parse", return_value=map_data, ), ): @@ -291,7 +291,9 @@ async def test_get_current_position_no_map_data( "homeassistant.components.roborock.coordinator.RoborockMqttClientV1.get_map_v1", return_value=None, ), - pytest.raises(HomeAssistantError, match="Failed to retrieve map data."), + pytest.raises( + HomeAssistantError, match="Something went wrong creating the map" + ), ): await hass.services.async_call( DOMAIN, @@ -316,7 +318,7 @@ async def test_get_current_position_no_robot_position( return_value=b"", ), patch( - "homeassistant.components.roborock.image.RoborockMapDataParser.parse", + "homeassistant.components.roborock.coordinator.RoborockMapDataParser.parse", return_value=map_data, ), pytest.raises(HomeAssistantError, match="Robot position not found"), diff --git a/tests/components/roku/test_binary_sensor.py b/tests/components/roku/test_binary_sensor.py index ad27a857101..c3aec4f0968 100644 --- a/tests/components/roku/test_binary_sensor.py +++ b/tests/components/roku/test_binary_sensor.py @@ -50,7 +50,7 @@ async def test_roku_binary_sensors( assert entry.unique_id == f"{UPNP_SERIAL}_supports_ethernet" assert entry.entity_category == EntityCategory.DIAGNOSTIC assert state.state == STATE_ON - assert state.attributes.get(ATTR_FRIENDLY_NAME) == "My Roku 3 Supports ethernet" + assert state.attributes.get(ATTR_FRIENDLY_NAME) == "My Roku 3 Supports Ethernet" assert ATTR_DEVICE_CLASS not in state.attributes state = hass.states.get("binary_sensor.my_roku_3_supports_find_remote") @@ -125,7 +125,7 @@ async def test_rokutv_binary_sensors( assert entry.entity_category == EntityCategory.DIAGNOSTIC assert state.state == STATE_ON assert ( - state.attributes.get(ATTR_FRIENDLY_NAME) == '58" Onn Roku TV Supports ethernet' + state.attributes.get(ATTR_FRIENDLY_NAME) == '58" Onn Roku TV Supports Ethernet' ) assert ATTR_DEVICE_CLASS not in state.attributes diff --git a/tests/components/sensor/test_init.py b/tests/components/sensor/test_init.py index b162200f95e..9666e29579b 100644 --- a/tests/components/sensor/test_init.py +++ b/tests/components/sensor/test_init.py @@ -24,6 +24,7 @@ from homeassistant.components.sensor import ( async_rounded_state, async_update_suggested_units, ) +from homeassistant.components.sensor.const import STATE_CLASS_UNITS from homeassistant.config_entries import ConfigEntry, ConfigFlow from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, @@ -2005,6 +2006,7 @@ async def 
test_non_numeric_device_class_with_unit_of_measurement( SensorDeviceClass.VOLUME, SensorDeviceClass.WATER, SensorDeviceClass.WEIGHT, + SensorDeviceClass.WIND_DIRECTION, SensorDeviceClass.WIND_SPEED, ], ) @@ -2035,6 +2037,37 @@ async def test_device_classes_with_invalid_unit_of_measurement( ) in caplog.text +@pytest.mark.parametrize( + "state_class", + [SensorStateClass.MEASUREMENT_ANGLE], +) +async def test_state_classes_with_invalid_unit_of_measurement( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + state_class: SensorStateClass, +) -> None: + """Test error when unit of measurement is not valid for used state class.""" + entity0 = MockSensor( + name="Test", + native_value="1.0", + state_class=state_class, + native_unit_of_measurement="INVALID!", + ) + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) + units = { + str(unit) if unit else "no unit of measurement" + for unit in STATE_CLASS_UNITS.get(state_class, set()) + } + assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) + await hass.async_block_till_done() + + assert ( + f"Sensor sensor.test ({entity0.__class__}) is using native unit of " + "measurement 'INVALID!' which is not a valid unit " + f"for the state class ('{state_class}') it is using; expected one of {units};" + ) in caplog.text + + @pytest.mark.parametrize( ("device_class", "state_class", "unit"), [ diff --git a/tests/components/sensor/test_recorder.py b/tests/components/sensor/test_recorder.py index a5b6a07dde5..962c0a0ef8f 100644 --- a/tests/components/sensor/test_recorder.py +++ b/tests/components/sensor/test_recorder.py @@ -1,7 +1,8 @@ """The tests for sensor recorder platform.""" -from collections.abc import Iterable +from collections.abc import Callable, Iterable from datetime import datetime, timedelta +import logging import math from statistics import mean from typing import Any, Literal @@ -26,17 +27,30 @@ from homeassistant.components.recorder.db_schema import ( ) from homeassistant.components.recorder.models import ( StatisticData, + StatisticMeanType, StatisticMetaData, process_timestamp, ) from homeassistant.components.recorder.statistics import ( + DEG_TO_RAD, + RAD_TO_DEG, async_import_statistics, get_metadata, list_statistic_ids, ) from homeassistant.components.recorder.util import get_instance, session_scope -from homeassistant.components.sensor import ATTR_OPTIONS, DOMAIN, SensorDeviceClass -from homeassistant.const import ATTR_FRIENDLY_NAME, STATE_UNAVAILABLE +from homeassistant.components.sensor import ( + ATTR_OPTIONS, + DOMAIN, + SensorDeviceClass, + SensorStateClass, +) +from homeassistant.components.sensor.recorder import ( + MEAN_TYPE_CHANGED_ISSUE, + STATE_CLASS_REMOVED_ISSUE, + UNITS_CHANGED_ISSUE, +) +from homeassistant.const import ATTR_FRIENDLY_NAME, DEGREE, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant, State from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component @@ -98,6 +112,13 @@ KW_SENSOR_ATTRIBUTES = { "state_class": "measurement", "unit_of_measurement": "kW", } +WIND_DIRECTION_ATTRIBUTES = { + "device_class": SensorDeviceClass.WIND_DIRECTION, + "state_class": SensorStateClass.MEASUREMENT_ANGLE, + "unit_of_measurement": DEGREE, +} +WIND_DIRECTION_STATES_SEQ = [350, 0, 15] +TEMP_STATES_SEQ = [-10, 15, 30, 60] @pytest.fixture @@ -281,6 +302,7 @@ async def test_compile_hourly_statistics( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": True, + "mean_type": 
StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -306,6 +328,64 @@ async def test_compile_hourly_statistics( assert "Error while processing event StatisticsTask" not in caplog.text +async def test_compile_hourly_statistics_angle( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test compiling hourly statistics for measurement_angle.""" + zero = get_start_time(dt_util.utcnow()) + await async_setup_component(hass, "sensor", {}) + # Wait for the sensor recorder platform to be added + await async_recorder_block_till_done(hass) + with freeze_time(zero) as freezer: + four, states = await async_record_states( + hass, + freezer, + zero, + "sensor.test1", + WIND_DIRECTION_ATTRIBUTES, + seq=WIND_DIRECTION_STATES_SEQ, + ) + await async_wait_recording_done(hass) + hist = history.get_significant_states( + hass, zero, four, hass.states.async_entity_ids() + ) + assert_dict_of_states_equal_without_context_and_last_changed(states, hist) + + do_adhoc_statistics(hass, start=zero) + await async_wait_recording_done(hass) + statistic_ids = await async_list_statistic_ids(hass) + assert statistic_ids == [ + { + "statistic_id": "sensor.test1", + "display_unit_of_measurement": DEGREE, + "has_mean": False, + "mean_type": StatisticMeanType.CIRCULAR, + "has_sum": False, + "name": None, + "source": "recorder", + "statistics_unit_of_measurement": DEGREE, + "unit_class": None, + } + ] + stats = statistics_during_period(hass, zero, period="5minute") + assert stats == { + "sensor.test1": [ + { + "start": process_timestamp(zero).timestamp(), + "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), + "mean": pytest.approx(0.5802544), + "min": None, + "max": None, + "last_reset": None, + "state": None, + "sum": None, + } + ] + } + assert "Error while processing event StatisticsTask" not in caplog.text + + @pytest.mark.parametrize( ( "device_class", @@ -349,7 +429,7 @@ async def test_compile_hourly_statistics_with_some_same_last_updated( "unit_of_measurement": state_unit, } attributes = dict(attributes) - seq = [-10, 15, 30, 60] + seq = TEMP_STATES_SEQ async def set_state(entity_id, state, **kwargs): """Set the state.""" @@ -395,6 +475,7 @@ async def test_compile_hourly_statistics_with_some_same_last_updated( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -420,33 +501,167 @@ async def test_compile_hourly_statistics_with_some_same_last_updated( assert "Error while processing event StatisticsTask" not in caplog.text +async def test_compile_hourly_statistics_with_some_same_last_updated_angle( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test compiling hourly statistics with the some of the same last updated value for measurement_angle. + + If the last updated value is the same we will have a zero duration. 
+ """ + zero = get_start_time(dt_util.utcnow()) + await async_setup_component(hass, "sensor", {}) + # Wait for the sensor recorder platform to be added + await async_recorder_block_till_done(hass) + entity_id = "sensor.test1" + seq = [350, 2, 15, 345] + + async def set_state(entity_id, state, **kwargs): + """Set the state.""" + hass.states.async_set(entity_id, state, **kwargs) + await async_wait_recording_done(hass) + return hass.states.get(entity_id) + + one = zero + timedelta(seconds=1 * 5) + two = one + timedelta(seconds=10 * 5) + three = two + timedelta(seconds=40 * 5) + four = three + timedelta(seconds=10 * 5) + + states = {entity_id: []} + with freeze_time(one) as freezer: + states[entity_id].append( + await set_state( + entity_id, str(seq[0]), attributes=WIND_DIRECTION_ATTRIBUTES + ) + ) + + # Record two states at the exact same time + freezer.move_to(two) + states[entity_id].append( + await set_state( + entity_id, str(seq[1]), attributes=WIND_DIRECTION_ATTRIBUTES + ) + ) + states[entity_id].append( + await set_state( + entity_id, str(seq[2]), attributes=WIND_DIRECTION_ATTRIBUTES + ) + ) + + freezer.move_to(three) + states[entity_id].append( + await set_state( + entity_id, str(seq[3]), attributes=WIND_DIRECTION_ATTRIBUTES + ) + ) + + hist = history.get_significant_states( + hass, zero, four, hass.states.async_entity_ids() + ) + assert_dict_of_states_equal_without_context_and_last_changed(states, hist) + + do_adhoc_statistics(hass, start=zero) + await async_wait_recording_done(hass) + statistic_ids = await async_list_statistic_ids(hass) + assert statistic_ids == [ + { + "statistic_id": "sensor.test1", + "display_unit_of_measurement": DEGREE, + "has_mean": False, + "mean_type": StatisticMeanType.CIRCULAR, + "has_sum": False, + "name": None, + "source": "recorder", + "statistics_unit_of_measurement": DEGREE, + "unit_class": None, + } + ] + stats = statistics_during_period(hass, zero, period="5minute") + assert stats == { + "sensor.test1": [ + { + "start": process_timestamp(zero).timestamp(), + "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), + "mean": pytest.approx(6.274605), + "min": None, + "max": None, + "last_reset": None, + "state": None, + "sum": None, + } + ] + } + assert "Error while processing event StatisticsTask" not in caplog.text + + @pytest.mark.parametrize( ( - "device_class", - "state_unit", + "attributes", "display_unit", "statistics_unit", "unit_class", "mean", "min", "max", + "mean_type", + "seq", ), [ - ("temperature", "°C", "°C", "°C", "temperature", 60, -10, 60), - ("temperature", "°F", "°F", "°F", "temperature", 60, -10, 60), + ( + { + "device_class": "temperature", + "state_class": "measurement", + "unit_of_measurement": "°C", + }, + "°C", + "°C", + "temperature", + 60, + -10, + 60, + StatisticMeanType.ARITHMETIC, + TEMP_STATES_SEQ, + ), + ( + { + "device_class": "temperature", + "state_class": "measurement", + "unit_of_measurement": "°F", + }, + "°F", + "°F", + "temperature", + 60, + -10, + 60, + StatisticMeanType.ARITHMETIC, + TEMP_STATES_SEQ, + ), + ( + WIND_DIRECTION_ATTRIBUTES, + DEGREE, + DEGREE, + None, + 15, + None, + None, + StatisticMeanType.CIRCULAR, + [350, 0, 355, 15], + ), ], ) async def test_compile_hourly_statistics_with_all_same_last_updated( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - device_class, - state_unit, - display_unit, - statistics_unit, - unit_class, - mean, - min, - max, + attributes: dict[str, Any], + display_unit: str, + statistics_unit: str, + unit_class: str | None, + mean: float | None, + min: 
float | None, + max: float | None, + mean_type: StatisticMeanType, + seq: list[float], ) -> None: """Test compiling hourly statistics with the all of the same last updated value. @@ -457,13 +672,6 @@ async def test_compile_hourly_statistics_with_all_same_last_updated( # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) entity_id = "sensor.test1" - attributes = { - "device_class": device_class, - "state_class": "measurement", - "unit_of_measurement": state_unit, - } - attributes = dict(attributes) - seq = [-10, 15, 30, 60] async def set_state(entity_id, state, **kwargs): """Set the state.""" @@ -503,7 +711,8 @@ async def test_compile_hourly_statistics_with_all_same_last_updated( { "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, - "has_mean": True, + "has_mean": mean_type is StatisticMeanType.ARITHMETIC, + "mean_type": mean_type, "has_sum": False, "name": None, "source": "recorder", @@ -531,45 +740,79 @@ async def test_compile_hourly_statistics_with_all_same_last_updated( @pytest.mark.parametrize( ( - "device_class", - "state_unit", + "attributes", "display_unit", "statistics_unit", "unit_class", "mean", "min", "max", + "mean_type", + "seq", ), [ - ("temperature", "°C", "°C", "°C", "temperature", 0, 60, 60), - ("temperature", "°F", "°F", "°F", "temperature", 0, 60, 60), + ( + { + "device_class": "temperature", + "state_class": "measurement", + "unit_of_measurement": "°C", + }, + "°C", + "°C", + "temperature", + 60, + -10, + 60, + StatisticMeanType.ARITHMETIC, + TEMP_STATES_SEQ, + ), + ( + { + "device_class": "temperature", + "state_class": "measurement", + "unit_of_measurement": "°F", + }, + "°F", + "°F", + "temperature", + 60, + -10, + 60, + StatisticMeanType.ARITHMETIC, + TEMP_STATES_SEQ, + ), + ( + WIND_DIRECTION_ATTRIBUTES, + DEGREE, + DEGREE, + None, + 15, + None, + None, + StatisticMeanType.CIRCULAR, + [350, 0, 355, 15], + ), ], ) -async def test_compile_hourly_statistics_only_state_is_and_end_of_period( +async def test_compile_hourly_statistics_only_state_is_at_end_of_period( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - device_class, - state_unit, - display_unit, - statistics_unit, - unit_class, - mean, - min, - max, + attributes: dict[str, Any], + display_unit: str, + statistics_unit: str, + unit_class: str | None, + mean: float | None, + min: float | None, + max: float | None, + mean_type: StatisticMeanType, + seq: list[float], ) -> None: - """Test compiling hourly statistics when the only state at end of period.""" + """Test compiling hourly statistics when the only states are at end of period.""" zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) entity_id = "sensor.test1" - attributes = { - "device_class": device_class, - "state_class": "measurement", - "unit_of_measurement": state_unit, - } - attributes = dict(attributes) - seq = [-10, 15, 30, 60] async def set_state(entity_id, state, **kwargs): """Set the state.""" @@ -604,13 +847,15 @@ async def test_compile_hourly_statistics_only_state_is_and_end_of_period( assert_dict_of_states_equal_without_context_and_last_changed(states, hist) do_adhoc_statistics(hass, start=zero) + do_adhoc_statistics(hass, start=zero + timedelta(minutes=5)) await async_wait_recording_done(hass) statistic_ids = await async_list_statistic_ids(hass) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": 
display_unit, - "has_mean": True, + "has_mean": mean_type is StatisticMeanType.ARITHMETIC, + "mean_type": mean_type, "has_sum": False, "name": None, "source": "recorder", @@ -622,8 +867,8 @@ async def test_compile_hourly_statistics_only_state_is_and_end_of_period( assert stats == { "sensor.test1": [ { - "start": process_timestamp(zero).timestamp(), - "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), + "start": process_timestamp(zero + timedelta(minutes=5)).timestamp(), + "end": process_timestamp(zero + timedelta(minutes=10)).timestamp(), "mean": pytest.approx(mean), "min": pytest.approx(min), "max": pytest.approx(max), @@ -651,7 +896,10 @@ async def test_compile_hourly_statistics_purged_state_changes( statistics_unit, unit_class, ) -> None: - """Test compiling hourly statistics.""" + """Test compiling hourly statistics. + + This tests statistics falls back to the state machine when states are purged. + """ zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added @@ -691,6 +939,7 @@ async def test_compile_hourly_statistics_purged_state_changes( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -716,6 +965,94 @@ async def test_compile_hourly_statistics_purged_state_changes( assert "Error while processing event StatisticsTask" not in caplog.text +@pytest.mark.parametrize( + ( + "device_class", + "state_unit", + "display_unit", + "statistics_unit", + "unit_class", + "mean", + "min", + "max", + ), + [ + (None, "%", "%", "%", "unitless", 13.050847, -10, 30), + ], +) +async def test_compile_hourly_statistics_ignore_future_state( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + device_class, + state_unit, + display_unit, + statistics_unit, + unit_class, + mean, + min, + max, +) -> None: + """Test compiling hourly statistics. + + This tests statistics does not fall back to the state machine if the state + in the state machine is newer than the end of the statistics period. 
+ """ + zero = get_start_time(dt_util.utcnow() + timedelta(minutes=5)) + previous_period = zero - timedelta(minutes=5) + await async_setup_component(hass, "sensor", {}) + # Wait for the sensor recorder platform to be added + await async_recorder_block_till_done(hass) + attributes = { + "device_class": device_class, + "state_class": "measurement", + "unit_of_measurement": state_unit, + } + with freeze_time(zero) as freezer: + four, states = await async_record_states( + hass, freezer, zero, "sensor.test1", attributes + ) + await async_wait_recording_done(hass) + hist = history.get_significant_states( + hass, zero, four, hass.states.async_entity_ids() + ) + assert_dict_of_states_equal_without_context_and_last_changed(states, hist) + + do_adhoc_statistics(hass, start=previous_period) + do_adhoc_statistics(hass, start=zero) + await async_wait_recording_done(hass) + statistic_ids = await async_list_statistic_ids(hass) + assert statistic_ids == [ + { + "statistic_id": "sensor.test1", + "display_unit_of_measurement": display_unit, + "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, + "has_sum": False, + "name": None, + "source": "recorder", + "statistics_unit_of_measurement": statistics_unit, + "unit_class": unit_class, + } + ] + stats = statistics_during_period(hass, previous_period, period="5minute") + # Check we get no stats from the previous period + assert stats == { + "sensor.test1": [ + { + "start": process_timestamp(zero).timestamp(), + "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), + "mean": pytest.approx(mean), + "min": pytest.approx(min), + "max": pytest.approx(max), + "last_reset": None, + "state": None, + "sum": None, + } + ] + } + assert "Error while processing event StatisticsTask" not in caplog.text + + @pytest.mark.parametrize("attributes", [TEMPERATURE_SENSOR_ATTRIBUTES]) async def test_compile_hourly_statistics_wrong_unit( hass: HomeAssistant, @@ -782,6 +1119,7 @@ async def test_compile_hourly_statistics_wrong_unit( "statistic_id": "sensor.test1", "display_unit_of_measurement": "°C", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -791,6 +1129,7 @@ async def test_compile_hourly_statistics_wrong_unit( { "display_unit_of_measurement": "invalid", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -801,6 +1140,7 @@ async def test_compile_hourly_statistics_wrong_unit( { "display_unit_of_measurement": None, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -812,6 +1152,7 @@ async def test_compile_hourly_statistics_wrong_unit( "statistic_id": "sensor.test6", "display_unit_of_measurement": "°C", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -822,6 +1163,7 @@ async def test_compile_hourly_statistics_wrong_unit( "statistic_id": "sensor.test7", "display_unit_of_measurement": "°C", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -993,6 +1335,7 @@ async def test_compile_hourly_sum_statistics_amount( "statistic_id": "sensor.test1", "display_unit_of_measurement": statistics_unit, "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -1197,6 +1540,7 @@ async def test_compile_hourly_sum_statistics_amount_reset_every_state_change( "statistic_id": 
"sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -1306,6 +1650,7 @@ async def test_compile_hourly_sum_statistics_amount_invalid_last_reset( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -1402,6 +1747,7 @@ async def test_compile_hourly_sum_statistics_nan_inf_state( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -1545,6 +1891,7 @@ async def test_compile_hourly_sum_statistics_negative_state( assert { "display_unit_of_measurement": display_unit, "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -1646,6 +1993,7 @@ async def test_compile_hourly_sum_statistics_total_no_reset( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -1759,6 +2107,7 @@ async def test_compile_hourly_sum_statistics_total_increasing( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -1885,6 +2234,7 @@ async def test_compile_hourly_sum_statistics_total_increasing_small_dip( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -1989,6 +2339,7 @@ async def test_compile_hourly_energy_statistics_unsupported( "statistic_id": "sensor.test1", "display_unit_of_measurement": "kWh", "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -2091,6 +2442,7 @@ async def test_compile_hourly_energy_statistics_multiple( "statistic_id": "sensor.test1", "display_unit_of_measurement": "kWh", "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -2101,6 +2453,7 @@ async def test_compile_hourly_energy_statistics_multiple( "statistic_id": "sensor.test2", "display_unit_of_measurement": "kWh", "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -2111,6 +2464,7 @@ async def test_compile_hourly_energy_statistics_multiple( "statistic_id": "sensor.test3", "display_unit_of_measurement": "Wh", "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -2293,8 +2647,64 @@ async def test_compile_hourly_statistics_unchanged( assert "Error while processing event StatisticsTask" not in caplog.text +async def test_compile_hourly_statistics_unchanged_angle( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test compiling hourly statistics, with no changes during the hour for measurement_angle.""" + zero = get_start_time(dt_util.utcnow()) + await async_setup_component(hass, "sensor", {}) + # Wait for the sensor recorder platform to be added + await async_recorder_block_till_done(hass) + with freeze_time(zero) as freezer: + four, states = await async_record_states( + hass, + freezer, + zero, + "sensor.test1", + 
WIND_DIRECTION_ATTRIBUTES, + seq=WIND_DIRECTION_STATES_SEQ, + ) + await async_wait_recording_done(hass) + hist = history.get_significant_states( + hass, zero, four, hass.states.async_entity_ids() + ) + assert_dict_of_states_equal_without_context_and_last_changed(states, hist) + + do_adhoc_statistics(hass, start=four) + await async_wait_recording_done(hass) + stats = statistics_during_period(hass, four, period="5minute") + assert stats == { + "sensor.test1": [ + { + "start": process_timestamp(four).timestamp(), + "end": process_timestamp(four + timedelta(minutes=5)).timestamp(), + "mean": pytest.approx(15), + "min": None, + "max": None, + "last_reset": None, + "state": None, + "sum": None, + } + ] + } + assert "Error while processing event StatisticsTask" not in caplog.text + + +@pytest.mark.parametrize( + ("attributes", "expected_mean", "expected_min", "expected_max"), + [ + (TEMPERATURE_SENSOR_ATTRIBUTES, 21.1864406779661, 10.0, 25.0), + (WIND_DIRECTION_ATTRIBUTES, 21.202479155239875, None, None), + ], +) async def test_compile_hourly_statistics_partially_unavailable( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + attributes: dict, + expected_mean: float, + expected_min: float | None, + expected_max: float | None, ) -> None: """Test compiling hourly statistics, with the sensor being partially unavailable.""" zero = get_start_time(dt_util.utcnow()) @@ -2302,7 +2712,7 @@ async def test_compile_hourly_statistics_partially_unavailable( # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) four, states = await async_record_states_partially_unavailable( - hass, zero, "sensor.test1", TEMPERATURE_SENSOR_ATTRIBUTES + hass, zero, "sensor.test1", attributes ) await async_wait_recording_done(hass) hist = history.get_significant_states( @@ -2318,9 +2728,9 @@ async def test_compile_hourly_statistics_partially_unavailable( { "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), - "mean": pytest.approx(21.1864406779661), - "min": pytest.approx(10.0), - "max": pytest.approx(25.0), + "mean": pytest.approx(expected_mean), + "min": pytest.approx(expected_min), + "max": pytest.approx(expected_max), "last_reset": None, "state": None, "sum": None, @@ -2411,6 +2821,58 @@ async def test_compile_hourly_statistics_unavailable( assert "Error while processing event StatisticsTask" not in caplog.text +async def test_compile_hourly_statistics_unavailable_angle( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test compiling hourly statistics, with one sensor being unavailable for measurement_angle. 
+ + sensor.test1 is unavailable and should not have statistics generated + sensor.test2 should have statistics generated + """ + zero = get_start_time(dt_util.utcnow()) + await async_setup_component(hass, "sensor", {}) + # Wait for the sensor recorder platform to be added + await async_recorder_block_till_done(hass) + four, states = await async_record_states_partially_unavailable( + hass, zero, "sensor.test1", WIND_DIRECTION_ATTRIBUTES + ) + with freeze_time(zero) as freezer: + _, _states = await async_record_states( + hass, + freezer, + zero, + "sensor.test2", + WIND_DIRECTION_ATTRIBUTES, + seq=WIND_DIRECTION_STATES_SEQ, + ) + await async_wait_recording_done(hass) + states = {**states, **_states} + hist = history.get_significant_states( + hass, zero, four, hass.states.async_entity_ids() + ) + assert_dict_of_states_equal_without_context_and_last_changed(states, hist) + + do_adhoc_statistics(hass, start=four) + await async_wait_recording_done(hass) + stats = statistics_during_period(hass, four, period="5minute") + assert stats == { + "sensor.test2": [ + { + "start": process_timestamp(four).timestamp(), + "end": process_timestamp(four + timedelta(minutes=5)).timestamp(), + "mean": pytest.approx(15), + "min": None, + "max": None, + "last_reset": None, + "state": None, + "sum": None, + } + ] + } + assert "Error while processing event StatisticsTask" not in caplog.text + + async def test_compile_hourly_statistics_fails( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: @@ -2439,59 +2901,267 @@ async def test_compile_hourly_statistics_fails( "statistic_type", ), [ - ("measurement", "area", "m²", "m²", "m²", "area", "mean"), - ("measurement", "area", "mi²", "mi²", "mi²", "area", "mean"), + ("measurement", "area", "m²", "m²", "m²", "area", StatisticMeanType.ARITHMETIC), + ( + "measurement", + "area", + "mi²", + "mi²", + "mi²", + "area", + StatisticMeanType.ARITHMETIC, + ), ("total", "area", "m²", "m²", "m²", "area", "sum"), ("total", "area", "mi²", "mi²", "mi²", "area", "sum"), - ("measurement", "battery", "%", "%", "%", "unitless", "mean"), - ("measurement", "battery", None, None, None, "unitless", "mean"), - ("measurement", "distance", "m", "m", "m", "distance", "mean"), - ("measurement", "distance", "mi", "mi", "mi", "distance", "mean"), + ( + "measurement", + "battery", + "%", + "%", + "%", + "unitless", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "battery", + None, + None, + None, + "unitless", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "distance", + "m", + "m", + "m", + "distance", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "distance", + "mi", + "mi", + "mi", + "distance", + StatisticMeanType.ARITHMETIC, + ), ("total", "distance", "m", "m", "m", "distance", "sum"), ("total", "distance", "mi", "mi", "mi", "distance", "sum"), ("total", "energy", "Wh", "Wh", "Wh", "energy", "sum"), ("total", "energy", "kWh", "kWh", "kWh", "energy", "sum"), - ("measurement", "energy", "Wh", "Wh", "Wh", "energy", "mean"), - ("measurement", "energy", "kWh", "kWh", "kWh", "energy", "mean"), - ("measurement", "humidity", "%", "%", "%", "unitless", "mean"), - ("measurement", "humidity", None, None, None, "unitless", "mean"), + ( + "measurement", + "energy", + "Wh", + "Wh", + "Wh", + "energy", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "energy", + "kWh", + "kWh", + "kWh", + "energy", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "humidity", + "%", + "%", + "%", + "unitless", + StatisticMeanType.ARITHMETIC, + ), + ( + 
"measurement", + "humidity", + None, + None, + None, + "unitless", + StatisticMeanType.ARITHMETIC, + ), ("total", "monetary", "USD", "USD", "USD", None, "sum"), ("total", "monetary", "None", "None", "None", None, "sum"), ("total", "gas", "m³", "m³", "m³", "volume", "sum"), ("total", "gas", "ft³", "ft³", "ft³", "volume", "sum"), - ("measurement", "monetary", "USD", "USD", "USD", None, "mean"), - ("measurement", "monetary", "None", "None", "None", None, "mean"), - ("measurement", "gas", "m³", "m³", "m³", "volume", "mean"), - ("measurement", "gas", "ft³", "ft³", "ft³", "volume", "mean"), - ("measurement", "pressure", "Pa", "Pa", "Pa", "pressure", "mean"), - ("measurement", "pressure", "hPa", "hPa", "hPa", "pressure", "mean"), - ("measurement", "pressure", "mbar", "mbar", "mbar", "pressure", "mean"), - ("measurement", "pressure", "inHg", "inHg", "inHg", "pressure", "mean"), - ("measurement", "pressure", "psi", "psi", "psi", "pressure", "mean"), - ("measurement", "speed", "m/s", "m/s", "m/s", "speed", "mean"), - ("measurement", "speed", "mph", "mph", "mph", "speed", "mean"), - ("measurement", "temperature", "°C", "°C", "°C", "temperature", "mean"), - ("measurement", "temperature", "°F", "°F", "°F", "temperature", "mean"), - ("measurement", "volume", "m³", "m³", "m³", "volume", "mean"), - ("measurement", "volume", "ft³", "ft³", "ft³", "volume", "mean"), + ( + "measurement", + "monetary", + "USD", + "USD", + "USD", + None, + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "monetary", + "None", + "None", + "None", + None, + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "gas", + "m³", + "m³", + "m³", + "volume", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "gas", + "ft³", + "ft³", + "ft³", + "volume", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "pressure", + "Pa", + "Pa", + "Pa", + "pressure", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "pressure", + "hPa", + "hPa", + "hPa", + "pressure", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "pressure", + "mbar", + "mbar", + "mbar", + "pressure", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "pressure", + "inHg", + "inHg", + "inHg", + "pressure", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "pressure", + "psi", + "psi", + "psi", + "pressure", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "speed", + "m/s", + "m/s", + "m/s", + "speed", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "speed", + "mph", + "mph", + "mph", + "speed", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "temperature", + "°C", + "°C", + "°C", + "temperature", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "temperature", + "°F", + "°F", + "°F", + "temperature", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "volume", + "m³", + "m³", + "m³", + "volume", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "volume", + "ft³", + "ft³", + "ft³", + "volume", + StatisticMeanType.ARITHMETIC, + ), ("total", "volume", "m³", "m³", "m³", "volume", "sum"), ("total", "volume", "ft³", "ft³", "ft³", "volume", "sum"), - ("measurement", "weight", "g", "g", "g", "mass", "mean"), - ("measurement", "weight", "oz", "oz", "oz", "mass", "mean"), + ("measurement", "weight", "g", "g", "g", "mass", StatisticMeanType.ARITHMETIC), + ( + "measurement", + "weight", + "oz", + "oz", + "oz", + "mass", + StatisticMeanType.ARITHMETIC, + ), ("total", "weight", "g", "g", "g", "mass", "sum"), ("total", "weight", "oz", "oz", "oz", "mass", "sum"), 
+ ( + SensorStateClass.MEASUREMENT_ANGLE, + SensorDeviceClass.WIND_DIRECTION, + DEGREE, + DEGREE, + DEGREE, + None, + StatisticMeanType.CIRCULAR, + ), ], ) async def test_list_statistic_ids( hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - state_class, - device_class, - state_unit, - display_unit, - statistics_unit, - unit_class, - statistic_type, + state_class: str | SensorStateClass, + device_class: str | SensorDeviceClass, + state_unit: str, + display_unit: str, + statistics_unit: str, + unit_class: str | None, + statistic_type: str | StatisticMeanType, ) -> None: """Test listing future statistic ids.""" await async_setup_component(hass, "sensor", {}) @@ -2505,11 +3175,20 @@ async def test_list_statistic_ids( } hass.states.async_set("sensor.test1", 0, attributes=attributes) statistic_ids = await async_list_statistic_ids(hass) + mean_type = ( + statistic_type + if isinstance(statistic_type, StatisticMeanType) + else StatisticMeanType.NONE + ) + statistic_type = ( + statistic_type if not isinstance(statistic_type, StatisticMeanType) else "mean" + ) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, - "has_mean": statistic_type == "mean", + "has_mean": mean_type is StatisticMeanType.ARITHMETIC, + "mean_type": mean_type, "has_sum": statistic_type == "sum", "name": None, "source": "recorder", @@ -2517,6 +3196,7 @@ async def test_list_statistic_ids( "unit_class": unit_class, }, ] + for stat_type in ("mean", "sum", "dogs"): statistic_ids = await async_list_statistic_ids(hass, statistic_type=stat_type) if statistic_type == stat_type: @@ -2524,7 +3204,8 @@ async def test_list_statistic_ids( { "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, - "has_mean": statistic_type == "mean", + "has_mean": mean_type is StatisticMeanType.ARITHMETIC, + "mean_type": mean_type, "has_sum": statistic_type == "sum", "name": None, "source": "recorder", @@ -2632,6 +3313,7 @@ async def test_compile_hourly_statistics_changing_units_1( "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -2667,6 +3349,7 @@ async def test_compile_hourly_statistics_changing_units_1( "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -2758,6 +3441,7 @@ async def test_compile_hourly_statistics_changing_units_2( "statistic_id": "sensor.test1", "display_unit_of_measurement": "cats", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -2840,6 +3524,7 @@ async def test_compile_hourly_statistics_changing_units_3( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -2875,6 +3560,7 @@ async def test_compile_hourly_statistics_changing_units_3( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -2957,6 +3643,7 @@ async def test_compile_hourly_statistics_convert_units_1( "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit_1, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, 
"name": None, "source": "recorder", @@ -3004,6 +3691,7 @@ async def test_compile_hourly_statistics_convert_units_1( "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit_2, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3120,6 +3808,7 @@ async def test_compile_hourly_statistics_equivalent_units_1( "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3151,6 +3840,7 @@ async def test_compile_hourly_statistics_equivalent_units_1( "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit2, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3242,6 +3932,7 @@ async def test_compile_hourly_statistics_equivalent_units_2( "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3326,6 +4017,7 @@ async def test_compile_hourly_statistics_changing_device_class_1( "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3375,6 +4067,7 @@ async def test_compile_hourly_statistics_changing_device_class_1( "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3434,6 +4127,7 @@ async def test_compile_hourly_statistics_changing_device_class_1( "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3538,6 +4232,7 @@ async def test_compile_hourly_statistics_changing_device_class_2( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3587,6 +4282,7 @@ async def test_compile_hourly_statistics_changing_device_class_2( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3626,15 +4322,13 @@ async def test_compile_hourly_statistics_changing_device_class_2( ( "device_class", "state_unit", - "display_unit", - "statistics_unit", "unit_class", "mean", "min", "max", ), [ - (None, None, None, None, "unitless", 13.050847, -10, 30), + (None, None, "unitless", 13.050847, -10, 30), ], ) async def test_compile_hourly_statistics_changing_state_class( @@ -3642,8 +4336,6 @@ async def test_compile_hourly_statistics_changing_state_class( caplog: pytest.LogCaptureFixture, device_class, state_unit, - display_unit, - statistics_unit, unit_class, mean, min, @@ -3679,6 +4371,7 @@ async def test_compile_hourly_statistics_changing_state_class( "statistic_id": "sensor.test1", "display_unit_of_measurement": None, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3692,6 +4385,7 @@ async def test_compile_hourly_statistics_changing_state_class( 1, { "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, 
"has_sum": False, "name": None, "source": "recorder", @@ -3721,6 +4415,7 @@ async def test_compile_hourly_statistics_changing_state_class( "statistic_id": "sensor.test1", "display_unit_of_measurement": None, "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -3734,6 +4429,7 @@ async def test_compile_hourly_statistics_changing_state_class( 1, { "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -3799,10 +4495,11 @@ async def test_compile_statistics_hourly_daily_monthly_summary( "unit_of_measurement": "EUR", } + durations = [50, 200, 45] + def _weighted_average(seq, i, last_state): total = 0 duration = 0 - durations = [50, 200, 45] if i > 0: total += last_state * 5 duration += 5 @@ -3811,6 +4508,24 @@ async def test_compile_statistics_hourly_daily_monthly_summary( duration += dur return total / duration + def _time_weighted_circular_mean(values: list[tuple[float, int]]): + sin_sum = 0 + cos_sum = 0 + for x, dur in values: + sin_sum += math.sin(x * DEG_TO_RAD) * dur + cos_sum += math.cos(x * DEG_TO_RAD) * dur + + return (RAD_TO_DEG * math.atan2(sin_sum, cos_sum)) % 360 + + def _circular_mean(values: list[float]) -> float: + sin_sum = 0 + cos_sum = 0 + for x in values: + sin_sum += math.sin(x * DEG_TO_RAD) + cos_sum += math.cos(x * DEG_TO_RAD) + + return (RAD_TO_DEG * math.atan2(sin_sum, cos_sum)) % 360 + def _min(seq, last_state): if last_state is None: return min(seq) @@ -3832,17 +4547,24 @@ async def test_compile_statistics_hourly_daily_monthly_summary( "sensor.test2": [], "sensor.test3": [], "sensor.test4": [], + "sensor.test5": [], } expected_minima = {"sensor.test1": [], "sensor.test2": [], "sensor.test3": []} expected_maxima = {"sensor.test1": [], "sensor.test2": [], "sensor.test3": []} - expected_averages = {"sensor.test1": [], "sensor.test2": [], "sensor.test3": []} + expected_means = { + "sensor.test1": [], + "sensor.test2": [], + "sensor.test3": [], + "sensor.test5": [], + } expected_states = {"sensor.test4": []} expected_sums = {"sensor.test4": []} - last_states = { + last_states: dict[str, float | None] = { "sensor.test1": None, "sensor.test2": None, "sensor.test3": None, "sensor.test4": None, + "sensor.test5": None, } start = zero for i in range(24): @@ -3855,7 +4577,7 @@ async def test_compile_statistics_hourly_daily_monthly_summary( last_state = last_states["sensor.test1"] expected_minima["sensor.test1"].append(_min(seq, last_state)) expected_maxima["sensor.test1"].append(_max(seq, last_state)) - expected_averages["sensor.test1"].append(_weighted_average(seq, i, last_state)) + expected_means["sensor.test1"].append(_weighted_average(seq, i, last_state)) last_states["sensor.test1"] = seq[-1] # test2 values change: min/max at the last state seq = [-10 * (i + 1), 15 * (i + 1), 30 * (i + 1)] @@ -3866,7 +4588,7 @@ async def test_compile_statistics_hourly_daily_monthly_summary( last_state = last_states["sensor.test2"] expected_minima["sensor.test2"].append(_min(seq, last_state)) expected_maxima["sensor.test2"].append(_max(seq, last_state)) - expected_averages["sensor.test2"].append(_weighted_average(seq, i, last_state)) + expected_means["sensor.test2"].append(_weighted_average(seq, i, last_state)) last_states["sensor.test2"] = seq[-1] # test3 values change: min/max at the first state seq = [-10 * (23 - i + 1), 15 * (23 - i + 1), 30 * (23 - i + 1)] @@ -3877,7 +4599,7 @@ async def test_compile_statistics_hourly_daily_monthly_summary( last_state = 
last_states["sensor.test3"] expected_minima["sensor.test3"].append(_min(seq, last_state)) expected_maxima["sensor.test3"].append(_max(seq, last_state)) - expected_averages["sensor.test3"].append(_weighted_average(seq, i, last_state)) + expected_means["sensor.test3"].append(_weighted_average(seq, i, last_state)) last_states["sensor.test3"] = seq[-1] # test4 values grow seq = [i, i + 0.5, i + 0.75] @@ -3900,6 +4622,18 @@ async def test_compile_statistics_hourly_daily_monthly_summary( ) last_states["sensor.test4"] = seq[-1] + # test5 circular mean + seq = [350 - i, 0 + (i / 2.0), 15 + i] + four, _states = await async_record_states( + hass, freezer, start, "sensor.test5", WIND_DIRECTION_ATTRIBUTES, seq + ) + states["sensor.test5"] += _states["sensor.test5"] + values = [(seq, durations[j]) for j, seq in enumerate(seq)] + if (state := last_states["sensor.test5"]) is not None: + values.append((state, 5)) + expected_means["sensor.test5"].append(_time_weighted_circular_mean(values)) + last_states["sensor.test5"] = seq[-1] + start += timedelta(minutes=5) await async_wait_recording_done(hass) hist = history.get_significant_states( @@ -3925,6 +4659,7 @@ async def test_compile_statistics_hourly_daily_monthly_summary( "statistic_id": "sensor.test1", "display_unit_of_measurement": "%", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3935,6 +4670,7 @@ async def test_compile_statistics_hourly_daily_monthly_summary( "statistic_id": "sensor.test2", "display_unit_of_measurement": "%", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3945,6 +4681,7 @@ async def test_compile_statistics_hourly_daily_monthly_summary( "statistic_id": "sensor.test3", "display_unit_of_measurement": "%", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3955,12 +4692,24 @@ async def test_compile_statistics_hourly_daily_monthly_summary( "statistic_id": "sensor.test4", "display_unit_of_measurement": "EUR", "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", "statistics_unit_of_measurement": "EUR", "unit_class": None, }, + { + "statistic_id": "sensor.test5", + "display_unit_of_measurement": DEGREE, + "has_mean": False, + "mean_type": StatisticMeanType.CIRCULAR, + "has_sum": False, + "name": None, + "source": "recorder", + "statistics_unit_of_measurement": DEGREE, + "unit_class": None, + }, ] # Adjust the inserted statistics @@ -3979,6 +4728,7 @@ async def test_compile_statistics_hourly_daily_monthly_summary( "sensor.test2": [], "sensor.test3": [], "sensor.test4": [], + "sensor.test5": [], } start = zero end = zero + timedelta(minutes=5) @@ -3988,11 +4738,10 @@ async def test_compile_statistics_hourly_daily_monthly_summary( "sensor.test2", "sensor.test3", "sensor.test4", + "sensor.test5", ): expected_average = ( - expected_averages[entity_id][i] - if entity_id in expected_averages - else None + expected_means[entity_id][i] if entity_id in expected_means else None ) expected_minimum = ( expected_minima[entity_id][i] if entity_id in expected_minima else None @@ -4022,176 +4771,78 @@ async def test_compile_statistics_hourly_daily_monthly_summary( end += timedelta(minutes=5) assert stats == expected_stats - stats = statistics_during_period(hass, zero, period="hour") - expected_stats = { - "sensor.test1": [], - "sensor.test2": [], - "sensor.test3": [], - "sensor.test4": 
[], - } - start = zero - end = zero + timedelta(hours=1) - for i in range(2): - for entity_id in ( - "sensor.test1", - "sensor.test2", - "sensor.test3", - "sensor.test4", - ): - expected_average = ( - mean(expected_averages[entity_id][i * 12 : (i + 1) * 12]) - if entity_id in expected_averages - else None - ) - expected_minimum = ( - min(expected_minima[entity_id][i * 12 : (i + 1) * 12]) - if entity_id in expected_minima - else None - ) - expected_maximum = ( - max(expected_maxima[entity_id][i * 12 : (i + 1) * 12]) - if entity_id in expected_maxima - else None - ) - expected_state = ( - expected_states[entity_id][(i + 1) * 12 - 1] - if entity_id in expected_states - else None - ) - expected_sum = ( - expected_sums[entity_id][(i + 1) * 12 - 1] - if entity_id in expected_sums - else None - ) - expected_stats[entity_id].append( - { - "start": process_timestamp(start).timestamp(), - "end": process_timestamp(end).timestamp(), - "mean": pytest.approx(expected_average), - "min": pytest.approx(expected_minimum), - "max": pytest.approx(expected_maximum), - "last_reset": None, - "state": expected_state, - "sum": expected_sum, - } - ) - start += timedelta(hours=1) - end += timedelta(hours=1) - assert stats == expected_stats + def verify_stats( + period: Literal["5minute", "day", "hour", "week", "month"], + start: datetime, + next_datetime: Callable[[datetime], datetime], + ) -> None: + stats = statistics_during_period(hass, zero, period=period) + expected_stats = { + "sensor.test1": [], + "sensor.test2": [], + "sensor.test3": [], + "sensor.test4": [], + "sensor.test5": [], + } + end = next_datetime(start) + for i in range(2): + for entity_id, mean_fn in ( + ("sensor.test1", mean), + ("sensor.test2", mean), + ("sensor.test3", mean), + ("sensor.test4", mean), + ("sensor.test5", _circular_mean), + ): + expected_average = ( + mean_fn(expected_means[entity_id][i * 12 : (i + 1) * 12]) + if entity_id in expected_means + else None + ) + expected_minimum = ( + min(expected_minima[entity_id][i * 12 : (i + 1) * 12]) + if entity_id in expected_minima + else None + ) + expected_maximum = ( + max(expected_maxima[entity_id][i * 12 : (i + 1) * 12]) + if entity_id in expected_maxima + else None + ) + expected_state = ( + expected_states[entity_id][(i + 1) * 12 - 1] + if entity_id in expected_states + else None + ) + expected_sum = ( + expected_sums[entity_id][(i + 1) * 12 - 1] + if entity_id in expected_sums + else None + ) + expected_stats[entity_id].append( + { + "start": process_timestamp(start).timestamp(), + "end": process_timestamp(end).timestamp(), + "mean": pytest.approx(expected_average), + "min": pytest.approx(expected_minimum), + "max": pytest.approx(expected_maximum), + "last_reset": None, + "state": expected_state, + "sum": expected_sum, + } + ) + start = next_datetime(start) + end = next_datetime(end) + assert stats == expected_stats + + verify_stats("hour", zero, lambda v: v + timedelta(hours=1)) - stats = statistics_during_period(hass, zero, period="day") - expected_stats = { - "sensor.test1": [], - "sensor.test2": [], - "sensor.test3": [], - "sensor.test4": [], - } start = dt_util.parse_datetime("2021-08-31T06:00:00+00:00") - end = start + timedelta(days=1) - for i in range(2): - for entity_id in ( - "sensor.test1", - "sensor.test2", - "sensor.test3", - "sensor.test4", - ): - expected_average = ( - mean(expected_averages[entity_id][i * 12 : (i + 1) * 12]) - if entity_id in expected_averages - else None - ) - expected_minimum = ( - min(expected_minima[entity_id][i * 12 : (i + 1) * 12]) - if entity_id 
in expected_minima - else None - ) - expected_maximum = ( - max(expected_maxima[entity_id][i * 12 : (i + 1) * 12]) - if entity_id in expected_maxima - else None - ) - expected_state = ( - expected_states[entity_id][(i + 1) * 12 - 1] - if entity_id in expected_states - else None - ) - expected_sum = ( - expected_sums[entity_id][(i + 1) * 12 - 1] - if entity_id in expected_sums - else None - ) - expected_stats[entity_id].append( - { - "start": process_timestamp(start).timestamp(), - "end": process_timestamp(end).timestamp(), - "mean": pytest.approx(expected_average), - "min": pytest.approx(expected_minimum), - "max": pytest.approx(expected_maximum), - "last_reset": None, - "state": expected_state, - "sum": expected_sum, - } - ) - start += timedelta(days=1) - end += timedelta(days=1) - assert stats == expected_stats + assert start + verify_stats("day", start, lambda v: v + timedelta(days=1)) - stats = statistics_during_period(hass, zero, period="month") - expected_stats = { - "sensor.test1": [], - "sensor.test2": [], - "sensor.test3": [], - "sensor.test4": [], - } start = dt_util.parse_datetime("2021-08-01T06:00:00+00:00") - end = dt_util.parse_datetime("2021-09-01T06:00:00+00:00") - for i in range(2): - for entity_id in ( - "sensor.test1", - "sensor.test2", - "sensor.test3", - "sensor.test4", - ): - expected_average = ( - mean(expected_averages[entity_id][i * 12 : (i + 1) * 12]) - if entity_id in expected_averages - else None - ) - expected_minimum = ( - min(expected_minima[entity_id][i * 12 : (i + 1) * 12]) - if entity_id in expected_minima - else None - ) - expected_maximum = ( - max(expected_maxima[entity_id][i * 12 : (i + 1) * 12]) - if entity_id in expected_maxima - else None - ) - expected_state = ( - expected_states[entity_id][(i + 1) * 12 - 1] - if entity_id in expected_states - else None - ) - expected_sum = ( - expected_sums[entity_id][(i + 1) * 12 - 1] - if entity_id in expected_sums - else None - ) - expected_stats[entity_id].append( - { - "start": process_timestamp(start).timestamp(), - "end": process_timestamp(end).timestamp(), - "mean": pytest.approx(expected_average), - "min": pytest.approx(expected_minimum), - "max": pytest.approx(expected_maximum), - "last_reset": None, - "state": expected_state, - "sum": expected_sum, - } - ) - start = (start + timedelta(days=31)).replace(day=1) - end = (end + timedelta(days=31)).replace(day=1) - assert stats == expected_stats + assert start + verify_stats("month", start, lambda v: (v + timedelta(days=31)).replace(day=1)) assert "Error while processing event StatisticsTask" not in caplog.text @@ -4337,11 +4988,11 @@ async def test_validate_unit_change_convertible( "statistic_id": "sensor.test", "supported_unit": supported_unit, }, - "type": "units_changed", + "type": UNITS_CHANGED_ISSUE, } ], } - await assert_validation_result(hass, client, expected, {"units_changed"}) + await assert_validation_result(hass, client, expected, {UNITS_CHANGED_ISSUE}) # Unavailable state - empty response hass.states.async_set( @@ -4562,11 +5213,11 @@ async def test_validate_statistics_unit_change_no_device_class( "statistic_id": "sensor.test", "supported_unit": supported_unit, }, - "type": "units_changed", + "type": UNITS_CHANGED_ISSUE, } ], } - await assert_validation_result(hass, client, expected, {"units_changed"}) + await assert_validation_result(hass, client, expected, {UNITS_CHANGED_ISSUE}) # Unavailable state - empty response hass.states.async_set( @@ -4678,11 +5329,11 @@ async def test_validate_statistics_state_class_removed( "sensor.test": [ { "data": 
{"statistic_id": "sensor.test"}, - "type": "state_class_removed", + "type": STATE_CLASS_REMOVED_ISSUE, } ], } - await assert_validation_result(hass, client, expected, {"state_class_removed"}) + await assert_validation_result(hass, client, expected, {STATE_CLASS_REMOVED_ISSUE}) # Unavailable state - empty response hass.states.async_set( @@ -4746,11 +5397,11 @@ async def test_validate_statistics_state_class_removed_issue_cleaned_up( "sensor.test": [ { "data": {"statistic_id": "sensor.test"}, - "type": "state_class_removed", + "type": STATE_CLASS_REMOVED_ISSUE, } ], } - await assert_validation_result(hass, client, expected, {"state_class_removed"}) + await assert_validation_result(hass, client, expected, {STATE_CLASS_REMOVED_ISSUE}) # Remove the statistics - empty response get_instance(hass).async_clear_statistics(["sensor.test"]) @@ -4995,11 +5646,11 @@ async def test_validate_statistics_unit_change_no_conversion( "statistic_id": "sensor.test", "supported_unit": unit1, }, - "type": "units_changed", + "type": UNITS_CHANGED_ISSUE, } ], } - await assert_validation_result(hass, client, expected, {"units_changed"}) + await assert_validation_result(hass, client, expected, {UNITS_CHANGED_ISSUE}) # Unavailable state - empty response hass.states.async_set( @@ -5176,11 +5827,11 @@ async def test_validate_statistics_unit_change_equivalent_units_2( "statistic_id": "sensor.test", "supported_unit": supported_unit, }, - "type": "units_changed", + "type": UNITS_CHANGED_ISSUE, } ], } - await assert_validation_result(hass, client, expected, {"units_changed"}) + await assert_validation_result(hass, client, expected, {UNITS_CHANGED_ISSUE}) # Run statistics one hour later, metadata will not be updated await async_recorder_block_till_done(hass) @@ -5189,7 +5840,7 @@ async def test_validate_statistics_unit_change_equivalent_units_2( await assert_statistic_ids( hass, [{"statistic_id": "sensor.test", "unit_of_measurement": unit1}] ) - await assert_validation_result(hass, client, expected, {"units_changed"}) + await assert_validation_result(hass, client, expected, {UNITS_CHANGED_ISSUE}) async def test_validate_statistics_other_domain( @@ -5278,7 +5929,7 @@ async def test_update_statistics_issues( now = await one_hour_stats(now) expected = { "state_class_removed_sensor.test": { - "issue_type": "state_class_removed", + "issue_type": STATE_CLASS_REMOVED_ISSUE, "statistic_id": "sensor.test", } } @@ -5482,8 +6133,9 @@ async def test_clean_up_repairs( create_issue("test", "test_issue", None) create_issue(DOMAIN, "test_issue_1", None) create_issue(DOMAIN, "test_issue_2", {"issue_type": "another_issue"}) - create_issue(DOMAIN, "test_issue_3", {"issue_type": "state_class_removed"}) - create_issue(DOMAIN, "test_issue_4", {"issue_type": "units_changed"}) + create_issue(DOMAIN, "test_issue_3", {"issue_type": STATE_CLASS_REMOVED_ISSUE}) + create_issue(DOMAIN, "test_issue_4", {"issue_type": UNITS_CHANGED_ISSUE}) + create_issue(DOMAIN, "test_issue_5", {"issue_type": MEAN_TYPE_CHANGED_ISSUE}) # Check the issues assert set(issue_registry.issues) == { @@ -5492,6 +6144,7 @@ async def test_clean_up_repairs( ("sensor", "test_issue_2"), ("sensor", "test_issue_3"), ("sensor", "test_issue_4"), + ("sensor", "test_issue_5"), } # Request update of issues @@ -5505,3 +6158,140 @@ async def test_clean_up_repairs( ("sensor", "test_issue_1"), ("sensor", "test_issue_2"), } + + +async def test_validate_statistics_mean_type_changed( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test 
validate_statistics. + + This tests that a validation issue is created when the mean type is changed. + """ + now = get_start_time(dt_util.utcnow()) + + await async_setup_component(hass, "sensor", {}) + await async_recorder_block_till_done(hass) + client = await hass_ws_client() + + # No statistics, no state - empty response + await assert_validation_result(hass, client, {}, {}) + + # No statistics, original unit - empty response + hass.states.async_set( + "sensor.wind_direction", + 10, + attributes=WIND_DIRECTION_ATTRIBUTES, + timestamp=now.timestamp(), + ) + await assert_validation_result(hass, client, {}, {}) + + # Run statistics + await async_recorder_block_till_done(hass) + do_adhoc_statistics(hass, start=now) + await async_recorder_block_till_done(hass) + statistic_ids = await async_list_statistic_ids(hass) + assert statistic_ids == [ + { + "statistic_id": "sensor.wind_direction", + "display_unit_of_measurement": DEGREE, + "has_mean": False, + "mean_type": StatisticMeanType.CIRCULAR, + "has_sum": False, + "name": None, + "source": "recorder", + "statistics_unit_of_measurement": DEGREE, + "unit_class": None, + } + ] + + expected_log_entry = ( + "homeassistant.components.sensor.recorder", + logging.WARNING, + ( + "The statistics mean algorithm for sensor.wind_direction have changed from" + " CIRCULAR to ARITHMETIC. Generation of long term statistics will be " + "suppressed unless it changes back or go to " + "https://my.home-assistant.io/redirect/developer_statistics " + "to delete the old statistics" + ), + ) + # Valid stats, no log entry + assert expected_log_entry not in caplog.record_tuples + + # State class changed + hass.states.async_set( + "sensor.wind_direction", + 5, + attributes={ + **WIND_DIRECTION_ATTRIBUTES, + "state_class": SensorStateClass.MEASUREMENT, + }, + timestamp=now.timestamp(), + ) + expected = { + "sensor.wind_direction": [ + { + "data": { + "statistic_id": "sensor.wind_direction", + "metadata_mean_type": StatisticMeanType.CIRCULAR, + "state_mean_type": StatisticMeanType.ARITHMETIC, + }, + "type": MEAN_TYPE_CHANGED_ISSUE, + } + ], + } + await assert_validation_result(hass, client, expected, {MEAN_TYPE_CHANGED_ISSUE}) + + # Run statistics one hour later, metadata will not be updated + await async_recorder_block_till_done(hass) + do_adhoc_statistics(hass, start=now + timedelta(hours=1)) + await async_recorder_block_till_done(hass) + statistic_ids = await async_list_statistic_ids(hass) + assert statistic_ids == [ + { + "statistic_id": "sensor.wind_direction", + "display_unit_of_measurement": DEGREE, + "has_mean": False, + "mean_type": StatisticMeanType.CIRCULAR, + "has_sum": False, + "name": None, + "source": "recorder", + "statistics_unit_of_measurement": DEGREE, + "unit_class": None, + } + ] + await assert_validation_result(hass, client, expected, {MEAN_TYPE_CHANGED_ISSUE}) + assert expected_log_entry in caplog.record_tuples + + # State class changed back + hass.states.async_set( + "sensor.wind_direction", + 350, + attributes=WIND_DIRECTION_ATTRIBUTES, + timestamp=now.timestamp(), + ) + await assert_validation_result(hass, client, {}, {}) + + # Run statistics + await async_recorder_block_till_done(hass) + do_adhoc_statistics(hass, start=now) + await async_recorder_block_till_done(hass) + statistic_ids = await async_list_statistic_ids(hass) + assert statistic_ids == [ + { + "statistic_id": "sensor.wind_direction", + "display_unit_of_measurement": DEGREE, + "has_mean": False, + "mean_type": StatisticMeanType.CIRCULAR, + "has_sum": False, + "name": None, + "source": 
"recorder", + "statistics_unit_of_measurement": DEGREE, + "unit_class": None, + } + ] + + # Issue should be resolved + await assert_validation_result(hass, client, {}, {}) diff --git a/tests/components/shelly/__init__.py b/tests/components/shelly/__init__.py index ddece280d8a..ec2d3d2c829 100644 --- a/tests/components/shelly/__init__.py +++ b/tests/components/shelly/__init__.py @@ -143,20 +143,6 @@ def get_entity( ) -def get_entity_state(hass: HomeAssistant, entity_id: str) -> str: - """Return entity state.""" - entity = hass.states.get(entity_id) - assert entity - return entity.state - - -def get_entity_attribute(hass: HomeAssistant, entity_id: str, attribute: str) -> str: - """Return entity attribute.""" - entity = hass.states.get(entity_id) - assert entity - return entity.attributes[attribute] - - def register_device( device_registry: DeviceRegistry, config_entry: ConfigEntry ) -> DeviceEntry: diff --git a/tests/components/shelly/conftest.py b/tests/components/shelly/conftest.py index 0063c5c2697..8f8255235be 100644 --- a/tests/components/shelly/conftest.py +++ b/tests/components/shelly/conftest.py @@ -102,12 +102,14 @@ MOCK_BLOCKS = [ "power": 53.4, "energy": 1234567.89, "output": True, + "totalWorkTime": 3600, }, channel="0", type="relay", overpower=0, power=53.4, energy=1234567.89, + totalWorkTime=3600, description="relay_0", set_state=AsyncMock(side_effect=lambda turn: {"ison": turn == "on"}), ), @@ -134,11 +136,20 @@ MOCK_BLOCKS = [ set_state=AsyncMock(side_effect=mock_light_set_state), ), Mock( - sensor_ids={"motion": 0, "temp": 22.1, "gas": "mild", "motionActive": 1}, + sensor_ids={ + "motion": 0, + "temp": 22.1, + "gas": "mild", + "motionActive": 1, + "sensorOp": "normal", + "selfTest": "pending", + }, channel="0", motion=0, temp=22.1, gas="mild", + sensorOp="normal", + selfTest="pending", targetTemp=4, description="sensor_0", type="sensor", @@ -502,6 +513,10 @@ def _mock_blu_rtv_device(version: str | None = None): firmware_version="some fw string", initialized=True, connected=True, + script_getcode=AsyncMock( + side_effect=lambda script_id: {"data": MOCK_SCRIPTS[script_id - 1]} + ), + xmod_info={}, ) type(device).name = PropertyMock(return_value="Test name") return device diff --git a/tests/components/shelly/snapshots/test_button.ambr b/tests/components/shelly/snapshots/test_button.ambr new file mode 100644 index 00000000000..f5a38f1b847 --- /dev/null +++ b/tests/components/shelly/snapshots/test_button.ambr @@ -0,0 +1,96 @@ +# serializer version: 1 +# name: test_rpc_blu_trv_button[button.trv_name_calibrate-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.trv_name_calibrate', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'TRV-Name Calibrate', + 'platform': 'shelly', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'calibrate', + 'unique_id': 'f8:44:77:25:f0:dd_calibrate', + 'unit_of_measurement': None, + }) +# --- +# name: test_rpc_blu_trv_button[button.trv_name_calibrate-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'TRV-Name Calibrate', + }), + 'context': , + 'entity_id': 'button.trv_name_calibrate', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_rpc_button[button.test_name_reboot-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.test_name_reboot', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Test name Reboot', + 'platform': 'shelly', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456789ABC_reboot', + 'unit_of_measurement': None, + }) +# --- +# name: test_rpc_button[button.test_name_reboot-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'restart', + 'friendly_name': 'Test name Reboot', + }), + 'context': , + 'entity_id': 'button.test_name_reboot', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/shelly/snapshots/test_climate.ambr b/tests/components/shelly/snapshots/test_climate.ambr new file mode 100644 index 00000000000..991c570172e --- /dev/null +++ b/tests/components/shelly/snapshots/test_climate.ambr @@ -0,0 +1,276 @@ +# serializer version: 1 +# name: test_blu_trv_climate_set_temperature[climate.trv_name-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + ]), + 'max_temp': 30, + 'min_temp': 4, + 'target_temp_step': 0.1, + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.trv_name', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'shelly', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'f8:44:77:25:f0:dd-blutrv:200', + 'unit_of_measurement': None, + }) +# --- +# name: test_blu_trv_climate_set_temperature[climate.trv_name-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 15.2, + 'friendly_name': 'TRV-Name', + 'hvac_action': , + 'hvac_modes': list([ + , + ]), + 'max_temp': 30, + 'min_temp': 4, + 'supported_features': , + 'target_temp_step': 0.1, + 'temperature': 17.1, + }), + 'context': , + 'entity_id': 'climate.trv_name', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_climate_hvac_mode[climate.test_name-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 31, + 'min_temp': 4, + 'preset_modes': list([ + 'none', + 'Profile1', + 'Profile2', + ]), + 'target_temp_step': 0.5, + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.test_name', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': None, + 'original_name': 'Test name', + 'platform': 'shelly', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '123456789ABC-sensor_0', + 'unit_of_measurement': None, + }) +# --- +# name: test_climate_hvac_mode[climate.test_name-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 22.1, + 'friendly_name': 'Test name', + 'hvac_action': , + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 31, + 'min_temp': 4, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'Profile1', + 'Profile2', + ]), + 'supported_features': , + 'target_temp_step': 0.5, + 'temperature': 4, + }), + 'context': , + 'entity_id': 'climate.test_name', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_rpc_climate_hvac_mode[climate.test_name_thermostat_0-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35, + 'min_temp': 5, + 'target_temp_step': 0.5, + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.test_name_thermostat_0', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Test name Thermostat 0', + 'platform': 'shelly', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '123456789ABC-thermostat:0', + 'unit_of_measurement': None, + }) +# --- +# name: test_rpc_climate_hvac_mode[climate.test_name_thermostat_0-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_humidity': 44.4, + 'current_temperature': 12.3, + 'friendly_name': 'Test name Thermostat 0', + 'hvac_action': , + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35, + 'min_temp': 5, + 'supported_features': , + 'target_temp_step': 0.5, + 'temperature': 23, + }), + 'context': , + 'entity_id': 'climate.test_name_thermostat_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_wall_display_thermostat_mode[climate.test_name_thermostat_0-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35, + 'min_temp': 5, + 'target_temp_step': 0.5, + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.test_name_thermostat_0', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Test name Thermostat 0', + 'platform': 'shelly', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '123456789ABC-thermostat:0', + 'unit_of_measurement': None, + }) +# --- +# name: test_wall_display_thermostat_mode[climate.test_name_thermostat_0-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_humidity': 44.4, + 'current_temperature': 12.3, + 'friendly_name': 'Test name Thermostat 0', + 'hvac_action': , + 'hvac_modes': list([ + , 
+ , + ]), + 'max_temp': 35, + 'min_temp': 5, + 'supported_features': , + 'target_temp_step': 0.5, + 'temperature': 23, + }), + 'context': , + 'entity_id': 'climate.test_name_thermostat_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- diff --git a/tests/components/shelly/test_binary_sensor.py b/tests/components/shelly/test_binary_sensor.py index 1e7c54320e8..ea3a7d5f3d2 100644 --- a/tests/components/shelly/test_binary_sensor.py +++ b/tests/components/shelly/test_binary_sensor.py @@ -39,15 +39,16 @@ async def test_block_binary_sensor( entity_id = f"{BINARY_SENSOR_DOMAIN}.test_name_channel_1_overpowering" await init_integration(hass, 1) - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF monkeypatch.setattr(mock_block_device.blocks[RELAY_BLOCK_ID], "overpower", 1) mock_block_device.mock_update() - assert hass.states.get(entity_id).state == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-relay_0-overpower" @@ -61,19 +62,18 @@ async def test_block_binary_sensor_extra_state_attr( entity_id = f"{BINARY_SENSOR_DOMAIN}.test_name_gas" await init_integration(hass, 1) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes.get("detected") == "mild" monkeypatch.setattr(mock_block_device.blocks[SENSOR_BLOCK_ID], "gas", "none") mock_block_device.mock_update() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF assert state.attributes.get("detected") == "none" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-sensor_0-gas" @@ -89,15 +89,16 @@ async def test_block_rest_binary_sensor( monkeypatch.setitem(mock_block_device.status, "cloud", {"connected": False}) await init_integration(hass, 1) - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF monkeypatch.setitem(mock_block_device.status["cloud"], "connected", True) await mock_rest_update(hass, freezer) - assert hass.states.get(entity_id).state == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-cloud" @@ -115,20 +116,22 @@ async def test_block_rest_binary_sensor_connected_battery_devices( monkeypatch.setitem(mock_block_device.settings["coiot"], "update_period", 3600) await init_integration(hass, 1, model=MODEL_MOTION) - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF monkeypatch.setitem(mock_block_device.status["cloud"], "connected", True) # Verify no update on fast intervals await mock_rest_update(hass, freezer) - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF # Verify update on slow intervals await mock_rest_update(hass, freezer, seconds=UPDATE_PERIOD_MULTIPLIER * 3600) - assert hass.states.get(entity_id).state == STATE_ON + assert 
(state := hass.states.get(entity_id)) + assert state.state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-cloud" @@ -149,15 +152,16 @@ async def test_block_sleeping_binary_sensor( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF monkeypatch.setattr(mock_block_device.blocks[SENSOR_BLOCK_ID], "motion", 1) mock_block_device.mock_update() - assert hass.states.get(entity_id).state == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-sensor_0-motion" @@ -183,14 +187,16 @@ async def test_block_restored_sleeping_binary_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON # Make device online monkeypatch.setattr(mock_block_device, "initialized", True) mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF async def test_block_restored_sleeping_binary_sensor_no_last_state( @@ -214,14 +220,16 @@ async def test_block_restored_sleeping_binary_sensor_no_last_state( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_UNKNOWN + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNKNOWN # Make device online monkeypatch.setattr(mock_block_device, "initialized", True) mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF async def test_rpc_binary_sensor( @@ -234,17 +242,18 @@ async def test_rpc_binary_sensor( entity_id = f"{BINARY_SENSOR_DOMAIN}.test_cover_0_overpowering" await init_integration(hass, 2) - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF mutate_rpc_device_status( monkeypatch, mock_rpc_device, "cover:0", "errors", "overpower" ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-cover:0-overpower" @@ -290,20 +299,22 @@ async def test_rpc_sleeping_binary_sensor( mock_rpc_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cloud", "connected", True) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == STATE_ON - - # test external power sensor - state = 
hass.states.get("binary_sensor.test_name_external_power") - assert state + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - entry = entity_registry.async_get("binary_sensor.test_name_external_power") - assert entry + # test external power sensor + assert (state := hass.states.get("binary_sensor.test_name_external_power")) + assert state.state == STATE_ON + + assert ( + entry := entity_registry.async_get("binary_sensor.test_name_external_power") + ) assert entry.unique_id == "123456789ABC-devicepower:0-external_power" @@ -331,14 +342,16 @@ async def test_rpc_restored_sleeping_binary_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON # Make device online monkeypatch.setattr(mock_rpc_device, "initialized", True) mock_rpc_device.mock_update() await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF async def test_rpc_restored_sleeping_binary_sensor_no_last_state( @@ -364,7 +377,8 @@ async def test_rpc_restored_sleeping_binary_sensor_no_last_state( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_UNKNOWN + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNKNOWN # Make device online monkeypatch.setattr(mock_rpc_device, "initialized", True) @@ -375,7 +389,8 @@ async def test_rpc_restored_sleeping_binary_sensor_no_last_state( mock_rpc_device.mock_update() await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF @pytest.mark.parametrize( @@ -407,17 +422,17 @@ async def test_rpc_device_virtual_binary_sensor( await init_integration(hass, 3) - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-boolean:203-boolean" monkeypatch.setitem(mock_rpc_device.status["boolean:203"], "value", False) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == STATE_OFF + + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF async def test_rpc_remove_virtual_binary_sensor_when_mode_toggle( @@ -450,8 +465,7 @@ async def test_rpc_remove_virtual_binary_sensor_when_mode_toggle( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None async def test_rpc_remove_virtual_binary_sensor_when_orphaned( @@ -475,8 +489,7 @@ async def test_rpc_remove_virtual_binary_sensor_when_orphaned( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None async def test_blu_trv_binary_sensor_entity( diff --git a/tests/components/shelly/test_button.py b/tests/components/shelly/test_button.py index 14349411670..2057076d18b 100644 --- a/tests/components/shelly/test_button.py +++ 
b/tests/components/shelly/test_button.py @@ -2,12 +2,17 @@ from unittest.mock import Mock +from aioshelly.const import MODEL_BLU_GATEWAY_G3 +from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError, RpcCallError import pytest +from syrupy import SnapshotAssertion from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.components.shelly.const import DOMAIN +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_registry import EntityRegistry from . import init_integration @@ -22,10 +27,10 @@ async def test_block_button( entity_id = "button.test_name_reboot" # reboot button - assert hass.states.get(entity_id).state == STATE_UNKNOWN + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNKNOWN - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC_reboot" await hass.services.async_call( @@ -38,7 +43,10 @@ async def test_block_button( async def test_rpc_button( - hass: HomeAssistant, mock_rpc_device: Mock, entity_registry: EntityRegistry + hass: HomeAssistant, + mock_rpc_device: Mock, + entity_registry: EntityRegistry, + snapshot: SnapshotAssertion, ) -> None: """Test rpc device OTA button.""" await init_integration(hass, 2) @@ -46,11 +54,11 @@ async def test_rpc_button( entity_id = "button.test_name_reboot" # reboot button - assert hass.states.get(entity_id).state == STATE_UNKNOWN + assert (state := hass.states.get(entity_id)) + assert state == snapshot(name=f"{entity_id}-state") - entry = entity_registry.async_get(entity_id) - assert entry - assert entry.unique_id == "123456789ABC_reboot" + assert (entry := entity_registry.async_get(entity_id)) + assert entry == snapshot(name=f"{entity_id}-entry") await hass.services.async_call( BUTTON_DOMAIN, @@ -61,6 +69,68 @@ async def test_rpc_button( assert mock_rpc_device.trigger_reboot.call_count == 1 +@pytest.mark.parametrize( + ("exception", "error"), + [ + ( + DeviceConnectionError, + "Device communication error occurred while calling action for button.test_name_reboot of Test name", + ), + ( + RpcCallError(999), + "RPC call error occurred while calling action for button.test_name_reboot of Test name", + ), + ], +) +async def test_rpc_button_exc( + hass: HomeAssistant, + mock_rpc_device: Mock, + exception: Exception, + error: str, +) -> None: + """Test RPC button with exception.""" + await init_integration(hass, 2) + + mock_rpc_device.trigger_reboot.side_effect = exception + + with pytest.raises(HomeAssistantError, match=error): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.test_name_reboot"}, + blocking=True, + ) + + +async def test_rpc_button_reauth_error( + hass: HomeAssistant, mock_rpc_device: Mock +) -> None: + """Test rpc device OTA button with authentication error.""" + entry = await init_integration(hass, 2) + + mock_rpc_device.trigger_reboot.side_effect = InvalidAuthError + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.test_name_reboot"}, + blocking=True, + ) + + assert entry.state is ConfigEntryState.LOADED + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + + flow = flows[0] + assert flow.get("step_id") == 
"reauth_confirm" + assert flow.get("handler") == DOMAIN + + assert "context" in flow + assert flow["context"].get("source") == SOURCE_REAUTH + assert flow["context"].get("entry_id") == entry.entry_id + + @pytest.mark.parametrize( ("gen", "old_unique_id", "new_unique_id", "migration"), [ @@ -104,3 +174,107 @@ async def test_migrate_unique_id( bool("Migrating unique_id for button.test_name_reboot" in caplog.text) == migration ) + + +async def test_rpc_blu_trv_button( + hass: HomeAssistant, + mock_blu_trv: Mock, + entity_registry: EntityRegistry, + monkeypatch: pytest.MonkeyPatch, + snapshot: SnapshotAssertion, +) -> None: + """Test RPC BLU TRV button.""" + monkeypatch.delitem(mock_blu_trv.status, "script:1") + monkeypatch.delitem(mock_blu_trv.status, "script:2") + monkeypatch.delitem(mock_blu_trv.status, "script:3") + + await init_integration(hass, 3, model=MODEL_BLU_GATEWAY_G3) + + entity_id = "button.trv_name_calibrate" + + state = hass.states.get(entity_id) + assert state == snapshot(name=f"{entity_id}-state") + + entry = entity_registry.async_get(entity_id) + assert entry == snapshot(name=f"{entity_id}-entry") + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + assert mock_blu_trv.trigger_blu_trv_calibration.call_count == 1 + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + ( + DeviceConnectionError, + "Device communication error occurred while calling action for button.trv_name_calibrate of Test name", + ), + ( + RpcCallError(999), + "RPC call error occurred while calling action for button.trv_name_calibrate of Test name", + ), + ], +) +async def test_rpc_blu_trv_button_exc( + hass: HomeAssistant, + mock_blu_trv: Mock, + monkeypatch: pytest.MonkeyPatch, + exception: Exception, + error: str, +) -> None: + """Test RPC BLU TRV button with exception.""" + monkeypatch.delitem(mock_blu_trv.status, "script:1") + monkeypatch.delitem(mock_blu_trv.status, "script:2") + monkeypatch.delitem(mock_blu_trv.status, "script:3") + + await init_integration(hass, 3, model=MODEL_BLU_GATEWAY_G3) + + mock_blu_trv.trigger_blu_trv_calibration.side_effect = exception + + with pytest.raises(HomeAssistantError, match=error): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.trv_name_calibrate"}, + blocking=True, + ) + + +async def test_rpc_blu_trv_button_auth_error( + hass: HomeAssistant, + mock_blu_trv: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test RPC BLU TRV button with authentication error.""" + monkeypatch.delitem(mock_blu_trv.status, "script:1") + monkeypatch.delitem(mock_blu_trv.status, "script:2") + monkeypatch.delitem(mock_blu_trv.status, "script:3") + + entry = await init_integration(hass, 3, model=MODEL_BLU_GATEWAY_G3) + + mock_blu_trv.trigger_blu_trv_calibration.side_effect = InvalidAuthError + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.trv_name_calibrate"}, + blocking=True, + ) + + assert entry.state is ConfigEntryState.LOADED + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + + flow = flows[0] + assert flow.get("step_id") == "reauth_confirm" + assert flow.get("handler") == DOMAIN + + assert "context" in flow + assert flow["context"].get("source") == SOURCE_REAUTH + assert flow["context"].get("entry_id") == entry.entry_id diff --git a/tests/components/shelly/test_climate.py b/tests/components/shelly/test_climate.py index c78e87ebfce..b2135fb38af 100644 --- 
a/tests/components/shelly/test_climate.py +++ b/tests/components/shelly/test_climate.py @@ -5,12 +5,14 @@ from unittest.mock import AsyncMock, Mock, PropertyMock from aioshelly.const import ( BLU_TRV_IDENTIFIER, + BLU_TRV_TIMEOUT, MODEL_BLU_GATEWAY_G3, MODEL_VALVE, MODEL_WALL_DISPLAY, ) from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError import pytest +from syrupy import SnapshotAssertion from homeassistant.components.climate import ( ATTR_CURRENT_HUMIDITY, @@ -26,7 +28,7 @@ from homeassistant.components.climate import ( HVACAction, HVACMode, ) -from homeassistant.components.shelly.const import BLU_TRV_TIMEOUT, DOMAIN +from homeassistant.components.shelly.const import DOMAIN from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState from homeassistant.const import ( @@ -42,13 +44,7 @@ from homeassistant.helpers.device_registry import DeviceRegistry from homeassistant.helpers.entity_registry import EntityRegistry from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM -from . import ( - MOCK_MAC, - get_entity_attribute, - init_integration, - register_device, - register_entity, -) +from . import MOCK_MAC, init_integration, register_device, register_entity from .conftest import MOCK_STATUS_COAP from tests.common import mock_restore_cache, mock_restore_cache_with_extra_data @@ -65,6 +61,7 @@ async def test_climate_hvac_mode( mock_block_device: Mock, monkeypatch: pytest.MonkeyPatch, entity_registry: EntityRegistry, + snapshot: SnapshotAssertion, ) -> None: """Test climate hvac mode service.""" monkeypatch.delattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "targetTemp") @@ -83,12 +80,9 @@ async def test_climate_hvac_mode( await hass.async_block_till_done(wait_background_tasks=True) # Test initial hvac mode - off - state = hass.states.get(ENTITY_ID) - assert state.state == HVACMode.OFF + assert hass.states.get(ENTITY_ID) == snapshot(name=f"{ENTITY_ID}-state") - entry = entity_registry.async_get(ENTITY_ID) - assert entry - assert entry.unique_id == "123456789ABC-sensor_0" + assert entity_registry.async_get(ENTITY_ID) == snapshot(name=f"{ENTITY_ID}-entry") # Test set hvac mode heat await hass.services.async_call( @@ -103,7 +97,8 @@ async def test_climate_hvac_mode( monkeypatch.setattr(mock_block_device.blocks[SENSOR_BLOCK_ID], "targetTemp", 20.0) mock_block_device.mock_update() - state = hass.states.get(ENTITY_ID) + + assert (state := hass.states.get(ENTITY_ID)) assert state.state == HVACMode.HEAT # Test set hvac mode off @@ -120,13 +115,13 @@ async def test_climate_hvac_mode( monkeypatch.setattr(mock_block_device.blocks[SENSOR_BLOCK_ID], "targetTemp", 4.0) mock_block_device.mock_update() - state = hass.states.get(ENTITY_ID) + assert (state := hass.states.get(ENTITY_ID)) assert state.state == HVACMode.OFF # Test unavailable on error monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "valveError", 1) mock_block_device.mock_update() - state = hass.states.get(ENTITY_ID) + assert (state := hass.states.get(ENTITY_ID)) assert state.state == STATE_UNAVAILABLE @@ -143,7 +138,7 @@ async def test_climate_set_temperature( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(ENTITY_ID) + assert (state := hass.states.get(ENTITY_ID)) assert state.state == HVACMode.OFF assert state.attributes[ATTR_TEMPERATURE] == 4 @@ -197,7 +192,7 @@ async def test_climate_set_preset_mode( mock_block_device.mock_online() await 
hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(ENTITY_ID) + assert (state := hass.states.get(ENTITY_ID)) assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE # Test set Profile2 @@ -215,7 +210,7 @@ async def test_climate_set_preset_mode( monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "mode", 2) mock_block_device.mock_update() - state = hass.states.get(ENTITY_ID) + assert (state := hass.states.get(ENTITY_ID)) assert state.attributes[ATTR_PRESET_MODE] == "Profile2" # Set preset to none @@ -234,7 +229,7 @@ async def test_climate_set_preset_mode( monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "mode", 0) mock_block_device.mock_update() - state = hass.states.get(ENTITY_ID) + assert (state := hass.states.get(ENTITY_ID)) assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE @@ -269,23 +264,26 @@ async def test_block_restored_climate( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == HVACMode.OFF - assert hass.states.get(entity_id).attributes.get("temperature") == 4.0 + assert (state := hass.states.get(entity_id)) + assert state.state == HVACMode.OFF + assert state.attributes.get(ATTR_TEMPERATURE) == 4.0 # Partial update, should not change state mock_block_device.mock_update() await hass.async_block_till_done() - assert hass.states.get(entity_id).state == HVACMode.OFF - assert hass.states.get(entity_id).attributes.get("temperature") == 4.0 + assert (state := hass.states.get(entity_id)) + assert state.state == HVACMode.OFF + assert state.attributes.get(ATTR_TEMPERATURE) == 4.0 # Make device online monkeypatch.setattr(mock_block_device, "initialized", True) mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == HVACMode.OFF - assert hass.states.get(entity_id).attributes.get("temperature") == 4.0 + assert (state := hass.states.get(entity_id)) + assert state.state == HVACMode.OFF + assert state.attributes.get(ATTR_TEMPERATURE) == 4.0 # Test set hvac mode heat, target temp should be set to last target temp (22) await hass.services.async_call( @@ -300,9 +298,10 @@ async def test_block_restored_climate( monkeypatch.setattr(mock_block_device.blocks[SENSOR_BLOCK_ID], "targetTemp", 22.0) mock_block_device.mock_update() - state = hass.states.get(ENTITY_ID) + + assert (state := hass.states.get(entity_id)) assert state.state == HVACMode.HEAT - assert hass.states.get(entity_id).attributes.get("temperature") == 22.0 + assert state.attributes.get(ATTR_TEMPERATURE) == 22.0 async def test_block_restored_climate_us_customary( @@ -337,17 +336,19 @@ async def test_block_restored_climate_us_customary( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == HVACMode.OFF - assert hass.states.get(entity_id).attributes.get("temperature") == 39 - assert hass.states.get(entity_id).attributes.get("current_temperature") == 67 + assert (state := hass.states.get(entity_id)) + assert state.state == HVACMode.OFF + assert state.attributes.get(ATTR_TEMPERATURE) == 39 + assert state.attributes.get(ATTR_CURRENT_TEMPERATURE) == 67 # Partial update, should not change state mock_block_device.mock_update() await hass.async_block_till_done() - assert hass.states.get(entity_id).state == HVACMode.OFF - assert hass.states.get(entity_id).attributes.get("temperature") == 39 - assert 
hass.states.get(entity_id).attributes.get("current_temperature") == 67 + assert (state := hass.states.get(entity_id)) + assert state.state == HVACMode.OFF + assert state.attributes.get(ATTR_TEMPERATURE) == 39 + assert state.attributes.get(ATTR_CURRENT_TEMPERATURE) == 67 # Make device online monkeypatch.setattr(mock_block_device, "initialized", True) @@ -356,9 +357,10 @@ async def test_block_restored_climate_us_customary( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == HVACMode.OFF - assert hass.states.get(entity_id).attributes.get("temperature") == 39 - assert hass.states.get(entity_id).attributes.get("current_temperature") == 65 + assert (state := hass.states.get(entity_id)) + assert state.state == HVACMode.OFF + assert state.attributes.get(ATTR_TEMPERATURE) == 39 + assert state.attributes.get(ATTR_CURRENT_TEMPERATURE) == 65 # Test set hvac mode heat, target temp should be set to last target temp (10.0/50) await hass.services.async_call( @@ -373,9 +375,10 @@ async def test_block_restored_climate_us_customary( monkeypatch.setattr(mock_block_device.blocks[SENSOR_BLOCK_ID], "targetTemp", 10.0) mock_block_device.mock_update() - state = hass.states.get(ENTITY_ID) + + assert (state := hass.states.get(entity_id)) assert state.state == HVACMode.HEAT - assert hass.states.get(entity_id).attributes.get("temperature") == 50 + assert state.attributes.get(ATTR_TEMPERATURE) == 50 async def test_block_restored_climate_unavailable( @@ -403,7 +406,8 @@ async def test_block_restored_climate_unavailable( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == HVACMode.OFF + assert (state := hass.states.get(entity_id)) + assert state.state == HVACMode.OFF async def test_block_restored_climate_set_preset_before_online( @@ -431,7 +435,8 @@ async def test_block_restored_climate_set_preset_before_online( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == HVACMode.HEAT + assert (state := hass.states.get(entity_id)) + assert state.state == HVACMode.HEAT with pytest.raises(ServiceValidationError): await hass.services.async_call( @@ -460,7 +465,10 @@ async def test_block_set_mode_connection_error( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - with pytest.raises(HomeAssistantError): + with pytest.raises( + HomeAssistantError, + match="Device communication error occurred while calling action for climate.test_name of Test name", + ): await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, @@ -603,27 +611,21 @@ async def test_rpc_climate_hvac_mode( entity_registry: EntityRegistry, mock_rpc_device: Mock, monkeypatch: pytest.MonkeyPatch, + snapshot: SnapshotAssertion, ) -> None: """Test climate hvac mode service.""" entity_id = "climate.test_name_thermostat_0" await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) - state = hass.states.get(entity_id) - assert state.state == HVACMode.HEAT - assert state.attributes[ATTR_TEMPERATURE] == 23 - assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 12.3 - assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.HEATING - assert state.attributes[ATTR_CURRENT_HUMIDITY] == 44.4 + assert (state := hass.states.get(entity_id)) == snapshot(name=f"{entity_id}-state") - entry = entity_registry.async_get(entity_id) - assert entry - assert entry.unique_id == 
"123456789ABC-thermostat:0" + assert entity_registry.async_get(entity_id) == snapshot(name=f"{entity_id}-entry") monkeypatch.setitem(mock_rpc_device.status["thermostat:0"], "output", False) mock_rpc_device.mock_update() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.IDLE assert state.attributes[ATTR_CURRENT_HUMIDITY] == 44.4 @@ -639,7 +641,7 @@ async def test_rpc_climate_hvac_mode( mock_rpc_device.call_rpc.assert_called_once_with( "Thermostat.SetConfig", {"config": {"id": 0, "enable": False}} ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == HVACMode.OFF @@ -657,15 +659,14 @@ async def test_rpc_climate_without_humidity( await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == HVACMode.HEAT assert state.attributes[ATTR_TEMPERATURE] == 23 assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 12.3 assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.HEATING assert ATTR_CURRENT_HUMIDITY not in state.attributes - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-thermostat:0" @@ -677,7 +678,7 @@ async def test_rpc_climate_set_temperature( await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_TEMPERATURE] == 23 monkeypatch.setitem(mock_rpc_device.status["thermostat:0"], "target_C", 28) @@ -692,7 +693,7 @@ async def test_rpc_climate_set_temperature( mock_rpc_device.call_rpc.assert_called_once_with( "Thermostat.SetConfig", {"config": {"id": 0, "target_C": 28}} ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_TEMPERATURE] == 28 @@ -707,7 +708,7 @@ async def test_rpc_climate_hvac_mode_cool( await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == HVACMode.COOL assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.COOLING @@ -717,6 +718,7 @@ async def test_wall_display_thermostat_mode( mock_rpc_device: Mock, entity_registry: EntityRegistry, monkeypatch: pytest.MonkeyPatch, + snapshot: SnapshotAssertion, ) -> None: """Test Wall Display in thermostat mode.""" climate_entity_id = "climate.test_name_thermostat_0" @@ -730,13 +732,11 @@ async def test_wall_display_thermostat_mode( # the climate entity should be created state = hass.states.get(climate_entity_id) - assert state - assert state.state == HVACMode.HEAT + assert state == snapshot(name=f"{climate_entity_id}-state") assert len(hass.states.async_entity_ids(CLIMATE_DOMAIN)) == 1 entry = entity_registry.async_get(climate_entity_id) - assert entry - assert entry.unique_id == "123456789ABC-thermostat:0" + assert entry == snapshot(name=f"{climate_entity_id}-entry") async def test_wall_display_thermostat_mode_external_actuator( @@ -757,26 +757,25 @@ async def test_wall_display_thermostat_mode_external_actuator( await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) # the switch entity should be created - state = hass.states.get(switch_entity_id) - assert state + assert (state := hass.states.get(switch_entity_id)) assert state.state == STATE_ON assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 # 
the climate entity should be created - state = hass.states.get(climate_entity_id) - assert state + assert (state := hass.states.get(climate_entity_id)) assert state.state == HVACMode.HEAT assert len(hass.states.async_entity_ids(CLIMATE_DOMAIN)) == 1 - entry = entity_registry.async_get(climate_entity_id) - assert entry + assert (entry := entity_registry.async_get(climate_entity_id)) assert entry.unique_id == "123456789ABC-thermostat:0" async def test_blu_trv_climate_set_temperature( hass: HomeAssistant, mock_blu_trv: Mock, + entity_registry: EntityRegistry, monkeypatch: pytest.MonkeyPatch, + snapshot: SnapshotAssertion, ) -> None: """Test BLU TRV set target temperature.""" @@ -785,7 +784,9 @@ async def test_blu_trv_climate_set_temperature( await init_integration(hass, 3, model=MODEL_BLU_GATEWAY_G3) - assert get_entity_attribute(hass, entity_id, ATTR_TEMPERATURE) == 17.1 + assert (state := hass.states.get(entity_id)) == snapshot(name=f"{entity_id}-state") + + assert entity_registry.async_get(entity_id) == snapshot(name=f"{entity_id}-entry") monkeypatch.setitem( mock_blu_trv.status[f"{BLU_TRV_IDENTIFIER}:200"], "target_C", 28 @@ -808,7 +809,8 @@ async def test_blu_trv_climate_set_temperature( BLU_TRV_TIMEOUT, ) - assert get_entity_attribute(hass, entity_id, ATTR_TEMPERATURE) == 28 + assert (state := hass.states.get(entity_id)) + assert state.attributes[ATTR_TEMPERATURE] == 28 async def test_blu_trv_climate_disabled( @@ -823,14 +825,16 @@ async def test_blu_trv_climate_disabled( await init_integration(hass, 3, model=MODEL_BLU_GATEWAY_G3) - assert get_entity_attribute(hass, entity_id, ATTR_TEMPERATURE) == 17.1 + assert (state := hass.states.get(entity_id)) + assert state.attributes[ATTR_TEMPERATURE] == 17.1 monkeypatch.setitem( mock_blu_trv.config[f"{BLU_TRV_IDENTIFIER}:200"], "enable", False ) mock_blu_trv.mock_update() - assert get_entity_attribute(hass, entity_id, ATTR_TEMPERATURE) is None + assert (state := hass.states.get(entity_id)) + assert state.attributes[ATTR_TEMPERATURE] is None async def test_blu_trv_climate_hvac_action( @@ -845,9 +849,11 @@ async def test_blu_trv_climate_hvac_action( await init_integration(hass, 3, model=MODEL_BLU_GATEWAY_G3) - assert get_entity_attribute(hass, entity_id, ATTR_HVAC_ACTION) == HVACAction.IDLE + assert (state := hass.states.get(entity_id)) + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.IDLE monkeypatch.setitem(mock_blu_trv.status[f"{BLU_TRV_IDENTIFIER}:200"], "pos", 10) mock_blu_trv.mock_update() - assert get_entity_attribute(hass, entity_id, ATTR_HVAC_ACTION) == HVACAction.HEATING + assert (state := hass.states.get(entity_id)) + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.HEATING diff --git a/tests/components/shelly/test_config_flow.py b/tests/components/shelly/test_config_flow.py index 0b2d355cfd8..fffffc21cae 100644 --- a/tests/components/shelly/test_config_flow.py +++ b/tests/components/shelly/test_config_flow.py @@ -24,6 +24,7 @@ from homeassistant.components.shelly.const import ( BLEScannerMode, ) from homeassistant.components.shelly.coordinator import ENTRY_RELOAD_COOLDOWN +from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ( CONF_HOST, CONF_MODEL, @@ -744,6 +745,38 @@ async def test_zeroconf_sleeping_device_error(hass: HomeAssistant) -> None: assert result["reason"] == "cannot_connect" +async def test_options_flow_abort_setup_retry( + hass: HomeAssistant, mock_rpc_device: Mock, monkeypatch: pytest.MonkeyPatch +) -> None: + """Test ble options abort if device is in setup retry.""" + 
monkeypatch.setattr( + mock_rpc_device, "initialize", AsyncMock(side_effect=DeviceConnectionError) + ) + entry = await init_integration(hass, 2) + + assert entry.state is ConfigEntryState.SETUP_RETRY + + result = await hass.config_entries.options.async_init(entry.entry_id) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" + + +async def test_options_flow_abort_no_scripts_support( + hass: HomeAssistant, mock_rpc_device: Mock, monkeypatch: pytest.MonkeyPatch +) -> None: + """Test ble options abort if device does not support scripts.""" + monkeypatch.setattr( + mock_rpc_device, "supports_scripts", AsyncMock(return_value=False) + ) + entry = await init_integration(hass, 2) + + result = await hass.config_entries.options.async_init(entry.entry_id) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "no_scripts_support" + + async def test_zeroconf_already_configured(hass: HomeAssistant) -> None: """Test we get the form.""" @@ -1080,7 +1113,7 @@ async def test_options_flow_ble(hass: HomeAssistant, mock_rpc_device: Mock) -> N await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"][CONF_BLE_SCANNER_MODE] == BLEScannerMode.DISABLED + assert result["data"][CONF_BLE_SCANNER_MODE] is BLEScannerMode.DISABLED result = await hass.config_entries.options.async_init(entry.entry_id) assert result["type"] is FlowResultType.FORM @@ -1096,7 +1129,7 @@ async def test_options_flow_ble(hass: HomeAssistant, mock_rpc_device: Mock) -> N await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"][CONF_BLE_SCANNER_MODE] == BLEScannerMode.ACTIVE + assert result["data"][CONF_BLE_SCANNER_MODE] is BLEScannerMode.ACTIVE result = await hass.config_entries.options.async_init(entry.entry_id) assert result["type"] is FlowResultType.FORM @@ -1112,7 +1145,7 @@ async def test_options_flow_ble(hass: HomeAssistant, mock_rpc_device: Mock) -> N await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"][CONF_BLE_SCANNER_MODE] == BLEScannerMode.PASSIVE + assert result["data"][CONF_BLE_SCANNER_MODE] is BLEScannerMode.PASSIVE await hass.config_entries.async_unload(entry.entry_id) diff --git a/tests/components/shelly/test_coordinator.py b/tests/components/shelly/test_coordinator.py index 55a1d8958cd..f89bec8853a 100644 --- a/tests/components/shelly/test_coordinator.py +++ b/tests/components/shelly/test_coordinator.py @@ -32,7 +32,6 @@ from homeassistant.helpers import device_registry as dr, issue_registry as ir from . 
import ( MOCK_MAC, - get_entity_state, init_integration, inject_rpc_device_event, mock_polling_rpc_update, @@ -72,7 +71,7 @@ async def test_block_reload_on_cfg_change( async_fire_time_changed(hass) await hass.async_block_till_done() - assert hass.states.get("switch.test_name_channel_1") is not None + assert hass.states.get("switch.test_name_channel_1") # Generate config change from switch to light monkeypatch.setitem( @@ -82,7 +81,7 @@ async def test_block_reload_on_cfg_change( mock_block_device.mock_update() await hass.async_block_till_done() - assert hass.states.get("switch.test_name_channel_1") is not None + assert hass.states.get("switch.test_name_channel_1") # Wait for debouncer freezer.tick(timedelta(seconds=ENTRY_RELOAD_COOLDOWN)) @@ -114,14 +113,14 @@ async def test_block_no_reload_on_bulb_changes( mock_block_device.mock_update() await hass.async_block_till_done() - assert hass.states.get("switch.test_name_channel_1") is not None + assert hass.states.get("switch.test_name_channel_1") # Wait for debouncer freezer.tick(timedelta(seconds=ENTRY_RELOAD_COOLDOWN)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert hass.states.get("switch.test_name_channel_1") is not None + assert hass.states.get("switch.test_name_channel_1") # Test no reload on effect change monkeypatch.setattr(mock_block_device.blocks[LIGHT_BLOCK_ID], "effect", 1) @@ -129,14 +128,14 @@ async def test_block_no_reload_on_bulb_changes( mock_block_device.mock_update() await hass.async_block_till_done() - assert hass.states.get("switch.test_name_channel_1") is not None + assert hass.states.get("switch.test_name_channel_1") # Wait for debouncer freezer.tick(timedelta(seconds=ENTRY_RELOAD_COOLDOWN)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert hass.states.get("switch.test_name_channel_1") is not None + assert hass.states.get("switch.test_name_channel_1") async def test_block_polling_auth_error( @@ -245,14 +244,16 @@ async def test_block_polling_connection_error( ) await init_integration(hass, 1) - assert get_entity_state(hass, "switch.test_name_channel_1") == STATE_ON + assert (state := hass.states.get("switch.test_name_channel_1")) + assert state.state == STATE_ON # Move time to generate polling freezer.tick(timedelta(seconds=UPDATE_PERIOD_MULTIPLIER * 15)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert get_entity_state(hass, "switch.test_name_channel_1") == STATE_UNAVAILABLE + assert (state := hass.states.get("switch.test_name_channel_1")) + assert state.state == STATE_UNAVAILABLE @pytest.mark.parametrize("exc", [DeviceConnectionError, MacAddressMismatchError]) @@ -270,12 +271,14 @@ async def test_block_rest_update_connection_error( await init_integration(hass, 1) await mock_rest_update(hass, freezer) - assert get_entity_state(hass, entity_id) == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON monkeypatch.setattr(mock_block_device, "update_shelly", AsyncMock(side_effect=exc)) await mock_rest_update(hass, freezer) - assert get_entity_state(hass, entity_id) == STATE_UNAVAILABLE + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE async def test_block_sleeping_device_no_periodic_updates( @@ -297,14 +300,16 @@ async def test_block_sleeping_device_no_periodic_updates( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert get_entity_state(hass, entity_id) == "22.1" + assert (state := hass.states.get(entity_id)) + assert 
state.state == "22.1" # Move time to generate polling freezer.tick(timedelta(seconds=UPDATE_PERIOD_MULTIPLIER * 3600)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert get_entity_state(hass, entity_id) == STATE_UNAVAILABLE + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE async def test_block_device_push_updates_failure( @@ -416,7 +421,7 @@ async def test_rpc_reload_on_cfg_change( ) await hass.async_block_till_done() - assert hass.states.get("switch.test_switch_0") is not None + assert hass.states.get("switch.test_switch_0") # Wait for debouncer freezer.tick(timedelta(seconds=ENTRY_RELOAD_COOLDOWN)) @@ -596,14 +601,16 @@ async def test_rpc_sleeping_device_no_periodic_updates( mock_rpc_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert get_entity_state(hass, entity_id) == "22.9" + assert (state := hass.states.get(entity_id)) + assert state.state == "22.9" # Move time to generate polling freezer.tick(timedelta(seconds=UPDATE_PERIOD_MULTIPLIER * 1000)) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - assert get_entity_state(hass, entity_id) is STATE_UNAVAILABLE + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE async def test_rpc_sleeping_device_firmware_unsupported( @@ -716,7 +723,8 @@ async def test_rpc_reconnect_error( monkeypatch.setitem(mock_rpc_device.status["sys"], "relay_in_thermostat", False) await init_integration(hass, 2) - assert get_entity_state(hass, "switch.test_switch_0") == STATE_ON + assert (state := hass.states.get("switch.test_switch_0")) + assert state.state == STATE_ON monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setattr(mock_rpc_device, "initialize", AsyncMock(side_effect=exc)) @@ -726,7 +734,8 @@ async def test_rpc_reconnect_error( async_fire_time_changed(hass) await hass.async_block_till_done() - assert get_entity_state(hass, "switch.test_switch_0") == STATE_UNAVAILABLE + assert (state := hass.states.get("switch.test_switch_0")) + assert state.state == STATE_UNAVAILABLE async def test_rpc_error_running_connected_events( @@ -748,14 +757,17 @@ async def test_rpc_error_running_connected_events( ) assert "Error running connected events for device" in caplog.text - assert get_entity_state(hass, "switch.test_switch_0") == STATE_UNAVAILABLE + + assert (state := hass.states.get("switch.test_switch_0")) + assert state.state == STATE_UNAVAILABLE # Move time to generate reconnect without error freezer.tick(timedelta(seconds=RPC_RECONNECT_INTERVAL)) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - assert get_entity_state(hass, "switch.test_switch_0") == STATE_ON + assert (state := hass.states.get("switch.test_switch_0")) + assert state.state == STATE_ON async def test_rpc_polling_connection_error( @@ -776,11 +788,13 @@ async def test_rpc_polling_connection_error( ), ) - assert get_entity_state(hass, entity_id) == "-63" + assert (state := hass.states.get(entity_id)) + assert state.state == "-63" await mock_polling_rpc_update(hass, freezer) - assert get_entity_state(hass, entity_id) == STATE_UNAVAILABLE + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE async def test_rpc_polling_disconnected( @@ -795,11 +809,13 @@ async def test_rpc_polling_disconnected( monkeypatch.setattr(mock_rpc_device, "connected", False) - assert get_entity_state(hass, entity_id) == "-63" + assert (state := 
hass.states.get(entity_id)) + assert state.state == "-63" await mock_polling_rpc_update(hass, freezer) - assert get_entity_state(hass, entity_id) == STATE_UNAVAILABLE + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE async def test_rpc_update_entry_fw_ver( @@ -837,12 +853,17 @@ async def test_rpc_update_entry_fw_ver( assert device.sw_version == "99.0.0" +@pytest.mark.parametrize(("supports_scripts"), [True, False]) async def test_rpc_runs_connected_events_when_initialized( hass: HomeAssistant, mock_rpc_device: Mock, monkeypatch: pytest.MonkeyPatch, + supports_scripts: bool, ) -> None: """Test RPC runs connected events when initialized.""" + monkeypatch.setattr( + mock_rpc_device, "supports_scripts", AsyncMock(return_value=supports_scripts) + ) monkeypatch.setattr(mock_rpc_device, "initialized", False) await init_integration(hass, 2) @@ -853,8 +874,9 @@ async def test_rpc_runs_connected_events_when_initialized( mock_rpc_device.mock_initialized() await hass.async_block_till_done() - # BLE script list is called during connected events - assert call.script_list() in mock_rpc_device.mock_calls + assert call.supports_scripts() in mock_rpc_device.mock_calls + # BLE script list is called during connected events if device supports scripts + assert bool(call.script_list() in mock_rpc_device.mock_calls) == supports_scripts async def test_rpc_sleeping_device_unload_ignore_ble_scanner( @@ -903,7 +925,8 @@ async def test_block_sleeping_device_connection_error( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert get_entity_state(hass, entity_id) == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON # Make device online event with connection error monkeypatch.setattr( @@ -917,7 +940,8 @@ async def test_block_sleeping_device_connection_error( await hass.async_block_till_done(wait_background_tasks=True) assert "Error connecting to Shelly device" in caplog.text - assert get_entity_state(hass, entity_id) == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON # Move time to generate sleep period update freezer.tick(timedelta(seconds=sleep_period * UPDATE_PERIOD_MULTIPLIER)) @@ -925,7 +949,8 @@ async def test_block_sleeping_device_connection_error( await hass.async_block_till_done(wait_background_tasks=True) assert "Sleeping device did not update" in caplog.text - assert get_entity_state(hass, entity_id) == STATE_UNAVAILABLE + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE async def test_rpc_sleeping_device_connection_error( @@ -954,7 +979,8 @@ async def test_rpc_sleeping_device_connection_error( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert get_entity_state(hass, entity_id) == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON # Make device online event with connection error monkeypatch.setattr( @@ -968,7 +994,8 @@ async def test_rpc_sleeping_device_connection_error( await hass.async_block_till_done(wait_background_tasks=True) assert "Error connecting to Shelly device" in caplog.text - assert get_entity_state(hass, entity_id) == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON # Move time to generate sleep period update freezer.tick(timedelta(seconds=sleep_period * UPDATE_PERIOD_MULTIPLIER)) @@ -976,7 +1003,8 @@ async def test_rpc_sleeping_device_connection_error( await 
hass.async_block_till_done(wait_background_tasks=True) assert "Sleeping device did not update" in caplog.text - assert get_entity_state(hass, entity_id) == STATE_UNAVAILABLE + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE async def test_rpc_sleeping_device_late_setup( @@ -1001,7 +1029,8 @@ async def test_rpc_sleeping_device_late_setup( monkeypatch.setattr(mock_rpc_device, "connected", True) mock_rpc_device.mock_initialized() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get("sensor.test_name_temperature") is not None + + assert hass.states.get("sensor.test_name_temperature") async def test_rpc_already_connected( diff --git a/tests/components/shelly/test_cover.py b/tests/components/shelly/test_cover.py index 40a364fd435..df3ab4f288d 100644 --- a/tests/components/shelly/test_cover.py +++ b/tests/components/shelly/test_cover.py @@ -47,7 +47,7 @@ async def test_block_device_services( {ATTR_ENTITY_ID: entity_id, ATTR_POSITION: 50}, blocking=True, ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_CURRENT_POSITION] == 50 await hass.services.async_call( @@ -56,7 +56,8 @@ async def test_block_device_services( {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - assert hass.states.get(entity_id).state == CoverState.OPENING + assert (state := hass.states.get(entity_id)) + assert state.state == CoverState.OPENING await hass.services.async_call( COVER_DOMAIN, @@ -64,7 +65,8 @@ async def test_block_device_services( {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - assert hass.states.get(entity_id).state == CoverState.CLOSING + assert (state := hass.states.get(entity_id)) + assert state.state == CoverState.CLOSING await hass.services.async_call( COVER_DOMAIN, @@ -72,10 +74,10 @@ async def test_block_device_services( {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - assert hass.states.get(entity_id).state == CoverState.CLOSED + assert (state := hass.states.get(entity_id)) + assert state.state == CoverState.CLOSED - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-roller_0" @@ -86,11 +88,15 @@ async def test_block_device_update( monkeypatch.setattr(mock_block_device.blocks[ROLLER_BLOCK_ID], "rollerPos", 0) await init_integration(hass, 1) - assert hass.states.get("cover.test_name").state == CoverState.CLOSED + state = hass.states.get("cover.test_name") + assert state + assert state.state == CoverState.CLOSED monkeypatch.setattr(mock_block_device.blocks[ROLLER_BLOCK_ID], "rollerPos", 100) mock_block_device.mock_update() - assert hass.states.get("cover.test_name").state == CoverState.OPEN + state = hass.states.get("cover.test_name") + assert state + assert state.state == CoverState.OPEN async def test_block_device_no_roller_blocks( @@ -99,6 +105,7 @@ async def test_block_device_no_roller_blocks( """Test block device without roller blocks.""" monkeypatch.setattr(mock_block_device.blocks[ROLLER_BLOCK_ID], "type", None) await init_integration(hass, 1) + assert hass.states.get("cover.test_name") is None @@ -118,7 +125,7 @@ async def test_rpc_device_services( {ATTR_ENTITY_ID: entity_id, ATTR_POSITION: 50}, blocking=True, ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_CURRENT_POSITION] == 50 mutate_rpc_device_status( @@ -131,7 +138,9 @@ async def test_rpc_device_services( blocking=True, ) 
mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == CoverState.OPENING + + assert (state := hass.states.get(entity_id)) + assert state.state == CoverState.OPENING mutate_rpc_device_status( monkeypatch, mock_rpc_device, "cover:0", "state", "closing" @@ -143,7 +152,9 @@ async def test_rpc_device_services( blocking=True, ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == CoverState.CLOSING + + assert (state := hass.states.get(entity_id)) + assert state.state == CoverState.CLOSING mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "state", "closed") await hass.services.async_call( @@ -153,10 +164,10 @@ async def test_rpc_device_services( blocking=True, ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == CoverState.CLOSED + assert (state := hass.states.get(entity_id)) + assert state.state == CoverState.CLOSED - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-cover:0" @@ -166,6 +177,7 @@ async def test_rpc_device_no_cover_keys( """Test RPC device without cover keys.""" monkeypatch.delitem(mock_rpc_device.status, "cover:0") await init_integration(hass, 2) + assert hass.states.get("cover.test_cover_0") is None @@ -175,11 +187,16 @@ async def test_rpc_device_update( """Test RPC device update.""" mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "state", "closed") await init_integration(hass, 2) - assert hass.states.get("cover.test_cover_0").state == CoverState.CLOSED + + state = hass.states.get("cover.test_cover_0") + assert state + assert state.state == CoverState.CLOSED mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "state", "open") mock_rpc_device.mock_update() - assert hass.states.get("cover.test_cover_0").state == CoverState.OPEN + state = hass.states.get("cover.test_cover_0") + assert state + assert state.state == CoverState.OPEN async def test_rpc_device_no_position_control( @@ -190,7 +207,10 @@ async def test_rpc_device_no_position_control( monkeypatch, mock_rpc_device, "cover:0", "pos_control", False ) await init_integration(hass, 2) - assert hass.states.get("cover.test_cover_0").state == CoverState.OPEN + + state = hass.states.get("cover.test_cover_0") + assert state + assert state.state == CoverState.OPEN async def test_rpc_cover_tilt( @@ -212,11 +232,10 @@ async def test_rpc_cover_tilt( await init_integration(hass, 3) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-cover:0" await hass.services.async_call( @@ -228,7 +247,7 @@ async def test_rpc_cover_tilt( mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "slat_pos", 50) mock_rpc_device.mock_update() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 await hass.services.async_call( @@ -240,7 +259,7 @@ async def test_rpc_cover_tilt( mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "slat_pos", 100) mock_rpc_device.mock_update() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 await hass.services.async_call( @@ -258,5 +277,5 @@ async def 
test_rpc_cover_tilt( mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "slat_pos", 10) mock_rpc_device.mock_update() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 10 diff --git a/tests/components/shelly/test_device_trigger.py b/tests/components/shelly/test_device_trigger.py index 89045208d20..ca9edb19fa7 100644 --- a/tests/components/shelly/test_device_trigger.py +++ b/tests/components/shelly/test_device_trigger.py @@ -168,7 +168,10 @@ async def test_get_triggers_for_invalid_device_id( connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, ) - with pytest.raises(InvalidDeviceAutomationConfig): + with pytest.raises( + InvalidDeviceAutomationConfig, + match="not found while configuring device automation triggers", + ): await async_get_device_automations( hass, DeviceAutomationType.TRIGGER, invalid_device.id ) @@ -384,7 +387,10 @@ async def test_validate_trigger_invalid_triggers( }, ) - assert "Invalid (type,subtype): ('single', 'button3')" in caplog.text + assert ( + "Invalid device automation trigger (type, subtype): ('single', 'button3')" + in caplog.text + ) async def test_rpc_no_runtime_data( diff --git a/tests/components/shelly/test_diagnostics.py b/tests/components/shelly/test_diagnostics.py index 3826631c580..84ebd50c425 100644 --- a/tests/components/shelly/test_diagnostics.py +++ b/tests/components/shelly/test_diagnostics.py @@ -109,8 +109,14 @@ async def test_rpc_config_entry_diagnostics( "bluetooth": { "scanner": { "connectable": False, - "current_mode": None, - "requested_mode": None, + "current_mode": { + "__type": "", + "repr": "", + }, + "requested_mode": { + "__type": "", + "repr": "", + }, "discovered_device_timestamps": {"AA:BB:CC:DD:EE:FF": ANY}, "discovered_devices_and_advertisement_data": [ { @@ -194,3 +200,21 @@ async def test_rpc_config_entry_diagnostics_ws_outbound( result["device_settings"]["ws_outbound_server_valid"] == ws_outbound_server_valid ) + + +async def test_rpc_config_entry_diagnostics_no_ws( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test config entry diagnostics for rpc device which doesn't support ws outbound.""" + config = deepcopy(mock_rpc_device.config) + config.pop("ws") + monkeypatch.setattr(mock_rpc_device, "config", config) + + entry = await init_integration(hass, 3) + + result = await get_diagnostics_for_config_entry(hass, hass_client, entry) + + assert result["device_settings"]["ws_outbound"] == "not supported" diff --git a/tests/components/shelly/test_event.py b/tests/components/shelly/test_event.py index e184c154697..a5367408955 100644 --- a/tests/components/shelly/test_event.py +++ b/tests/components/shelly/test_event.py @@ -33,8 +33,7 @@ async def test_rpc_button( await init_integration(hass, 2) entity_id = "event.test_name_input_0" - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == STATE_UNKNOWN assert state.attributes.get(ATTR_EVENT_TYPES) == unordered( ["btn_down", "btn_up", "double_push", "long_push", "single_push", "triple_push"] @@ -42,8 +41,7 @@ async def test_rpc_button( assert state.attributes.get(ATTR_EVENT_TYPE) is None assert state.attributes.get(ATTR_DEVICE_CLASS) == EventDeviceClass.BUTTON - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == 
"123456789ABC-input:0" inject_rpc_device_event( @@ -62,7 +60,7 @@ async def test_rpc_button( ) await hass.async_block_till_done() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes.get(ATTR_EVENT_TYPE) == "single_push" @@ -78,11 +76,9 @@ async def test_rpc_script_1_event( await init_integration(hass, 2) entity_id = "event.test_name_test_script_js" - state = hass.states.get(entity_id) - assert state == snapshot(name=f"{entity_id}-state") + assert hass.states.get(entity_id) == snapshot(name=f"{entity_id}-state") - entry = entity_registry.async_get(entity_id) - assert entry == snapshot(name=f"{entity_id}-entry") + assert entity_registry.async_get(entity_id) == snapshot(name=f"{entity_id}-entry") inject_rpc_device_event( monkeypatch, @@ -101,7 +97,7 @@ async def test_rpc_script_1_event( ) await hass.async_block_till_done() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes.get(ATTR_EVENT_TYPE) == "script_start" inject_rpc_device_event( @@ -121,7 +117,7 @@ async def test_rpc_script_1_event( ) await hass.async_block_till_done() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes.get(ATTR_EVENT_TYPE) != "unknown_event" @@ -135,11 +131,9 @@ async def test_rpc_script_2_event( await init_integration(hass, 2) entity_id = "event.test_name_test_script_2_js" - state = hass.states.get(entity_id) - assert state == snapshot(name=f"{entity_id}-state") + assert hass.states.get(entity_id) == snapshot(name=f"{entity_id}-state") - entry = entity_registry.async_get(entity_id) - assert entry == snapshot(name=f"{entity_id}-entry") + assert entity_registry.async_get(entity_id) == snapshot(name=f"{entity_id}-entry") @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -152,11 +146,9 @@ async def test_rpc_script_ble_event( await init_integration(hass, 2) entity_id = f"event.test_name_{BLE_SCRIPT_NAME}" - state = hass.states.get(entity_id) - assert state == snapshot(name=f"{entity_id}-state") + assert hass.states.get(entity_id) == snapshot(name=f"{entity_id}-state") - entry = entity_registry.async_get(entity_id) - assert entry == snapshot(name=f"{entity_id}-entry") + assert entity_registry.async_get(entity_id) == snapshot(name=f"{entity_id}-entry") async def test_rpc_event_removal( @@ -186,15 +178,13 @@ async def test_block_event( await init_integration(hass, 1) entity_id = "event.test_name_channel_1" - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == STATE_UNKNOWN assert state.attributes.get(ATTR_EVENT_TYPES) == unordered(["single", "long"]) assert state.attributes.get(ATTR_EVENT_TYPE) is None assert state.attributes.get(ATTR_DEVICE_CLASS) == EventDeviceClass.BUTTON - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-relay_0-1" monkeypatch.setattr( @@ -206,7 +196,7 @@ async def test_block_event( mock_block_device.mock_update() await hass.async_block_till_done() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes.get(ATTR_EVENT_TYPE) == "long" @@ -217,8 +207,7 @@ async def test_block_event_shix3_1( await init_integration(hass, 1, model=MODEL_I3) entity_id = "event.test_name_channel_1" - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert 
state.attributes.get(ATTR_EVENT_TYPES) == unordered( ["double", "long", "long_single", "single", "single_long", "triple"] ) diff --git a/tests/components/shelly/test_init.py b/tests/components/shelly/test_init.py index ef9b8f72616..129aa812580 100644 --- a/tests/components/shelly/test_init.py +++ b/tests/components/shelly/test_init.py @@ -10,6 +10,7 @@ from aioshelly.exceptions import ( DeviceConnectionError, InvalidAuthError, MacAddressMismatchError, + RpcCallError, ) from aioshelly.rpc_device.utils import bluetooth_mac_from_primary_mac import pytest @@ -306,7 +307,8 @@ async def test_sleeping_rpc_device_online_during_setup( assert "will resume when device is online" in caplog.text assert "is online (source: setup)" in caplog.text - assert hass.states.get("sensor.test_name_temperature") is not None + + assert hass.states.get("sensor.test_name_temperature") async def test_sleeping_rpc_device_offline_during_setup( @@ -335,7 +337,7 @@ async def test_sleeping_rpc_device_offline_during_setup( mock_rpc_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get("sensor.test_name_temperature") is not None + assert hass.states.get("sensor.test_name_temperature") @pytest.mark.parametrize( @@ -359,13 +361,15 @@ async def test_entry_unload( entry = await init_integration(hass, gen) assert entry.state is ConfigEntryState.LOADED - assert hass.states.get(entity_id).state is STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() assert entry.state is ConfigEntryState.NOT_LOADED - assert hass.states.get(entity_id).state is STATE_UNAVAILABLE + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE @pytest.mark.parametrize( @@ -383,9 +387,9 @@ async def test_entry_unload_device_not_ready( mock_rpc_device: Mock, ) -> None: """Test entry unload when device is not ready.""" - entry = await init_integration(hass, gen, sleep_period=1000) - + assert (entry := await init_integration(hass, gen, sleep_period=1000)) assert entry.state is ConfigEntryState.LOADED + assert hass.states.get(entity_id) is None await hass.config_entries.async_unload(entry.entry_id) @@ -404,13 +408,15 @@ async def test_entry_unload_not_connected( with patch( "homeassistant.components.shelly.coordinator.async_stop_scanner" ) as mock_stop_scanner: - entry = await init_integration( - hass, 2, options={CONF_BLE_SCANNER_MODE: BLEScannerMode.ACTIVE} + assert ( + entry := await init_integration( + hass, 2, options={CONF_BLE_SCANNER_MODE: BLEScannerMode.ACTIVE} + ) ) - entity_id = "switch.test_switch_0" - assert entry.state is ConfigEntryState.LOADED - assert hass.states.get(entity_id).state is STATE_ON + + assert (state := hass.states.get("switch.test_switch_0")) + assert state.state == STATE_ON assert not mock_stop_scanner.call_count monkeypatch.setattr(mock_rpc_device, "connected", False) @@ -433,13 +439,15 @@ async def test_entry_unload_not_connected_but_we_think_we_are( "homeassistant.components.shelly.coordinator.async_stop_scanner", side_effect=DeviceConnectionError, ) as mock_stop_scanner: - entry = await init_integration( - hass, 2, options={CONF_BLE_SCANNER_MODE: BLEScannerMode.ACTIVE} + assert ( + entry := await init_integration( + hass, 2, options={CONF_BLE_SCANNER_MODE: BLEScannerMode.ACTIVE} + ) ) - entity_id = "switch.test_switch_0" - assert entry.state is ConfigEntryState.LOADED - assert hass.states.get(entity_id).state is STATE_ON + 
+ assert (state := hass.states.get("switch.test_switch_0")) + assert state.state == STATE_ON assert not mock_stop_scanner.call_count monkeypatch.setattr(mock_rpc_device, "connected", False) @@ -472,7 +480,9 @@ async def test_entry_missing_gen(hass: HomeAssistant, mock_block_device: Mock) - entry = await init_integration(hass, None) assert entry.state is ConfigEntryState.LOADED - assert hass.states.get("switch.test_name_channel_1").state is STATE_ON + + assert (state := hass.states.get("switch.test_name_channel_1")) + assert state.state == STATE_ON async def test_entry_missing_port(hass: HomeAssistant) -> None: @@ -555,3 +565,17 @@ async def test_bluetooth_cleanup_on_remove_entry( remove_mock.assert_called_once_with( hass, format_mac(bluetooth_mac_from_primary_mac(entry.unique_id)).upper() ) + + +async def test_device_script_getcode_error( + hass: HomeAssistant, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test device script get code error.""" + monkeypatch.setattr( + mock_rpc_device, "script_getcode", AsyncMock(side_effect=RpcCallError(0)) + ) + + entry = await init_integration(hass, 2) + assert entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/shelly/test_light.py b/tests/components/shelly/test_light.py index 482821aa966..0dab06f53a9 100644 --- a/tests/components/shelly/test_light.py +++ b/tests/components/shelly/test_light.py @@ -65,18 +65,17 @@ async def test_block_device_rgbw_bulb( await init_integration(hass, 1, model=MODEL_BULB) # Test initial - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_RGBW_COLOR] == (45, 55, 65, 70) - assert attributes[ATTR_BRIGHTNESS] == 48 - assert attributes[ATTR_SUPPORTED_COLOR_MODES] == [ + assert state.attributes[ATTR_RGBW_COLOR] == (45, 55, 65, 70) + assert state.attributes[ATTR_BRIGHTNESS] == 48 + assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ ColorMode.COLOR_TEMP, ColorMode.RGBW, ] - assert attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.EFFECT - assert len(attributes[ATTR_EFFECT_LIST]) == 7 - assert attributes[ATTR_EFFECT] == "Off" + assert state.attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.EFFECT + assert len(state.attributes[ATTR_EFFECT_LIST]) == 7 + assert state.attributes[ATTR_EFFECT] == "Off" # Turn off mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.reset_mock() @@ -89,7 +88,7 @@ async def test_block_device_rgbw_bulb( mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.assert_called_once_with( turn="off" ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF # Turn on, RGBW = [70, 80, 90, 20], brightness = 33, effect = Flash @@ -108,13 +107,12 @@ async def test_block_device_rgbw_bulb( mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.assert_called_once_with( turn="on", gain=13, brightness=13, red=70, green=80, blue=90, white=30, effect=3 ) - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_COLOR_MODE] == ColorMode.RGBW - assert attributes[ATTR_RGBW_COLOR] == (70, 80, 90, 30) - assert attributes[ATTR_BRIGHTNESS] == 33 - assert attributes[ATTR_EFFECT] == "Flash" + assert state.attributes[ATTR_COLOR_MODE] == ColorMode.RGBW + assert state.attributes[ATTR_RGBW_COLOR] == (70, 80, 90, 30) + assert state.attributes[ATTR_BRIGHTNESS] == 33 + assert 
state.attributes[ATTR_EFFECT] == "Flash" # Turn on, COLOR_TEMP_KELVIN = 3500 mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.reset_mock() @@ -127,14 +125,12 @@ async def test_block_device_rgbw_bulb( mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.assert_called_once_with( turn="on", temp=3500, mode="white" ) - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_COLOR_MODE] == ColorMode.COLOR_TEMP - assert attributes[ATTR_COLOR_TEMP_KELVIN] == 3500 + assert state.attributes[ATTR_COLOR_MODE] == ColorMode.COLOR_TEMP + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 3500 - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-light_0" @@ -154,21 +150,20 @@ async def test_block_device_rgb_bulb( await init_integration(hass, 1, model=MODEL_BULB_RGBW) # Test initial - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_RGB_COLOR] == (45, 55, 65) - assert attributes[ATTR_BRIGHTNESS] == 48 - assert attributes[ATTR_SUPPORTED_COLOR_MODES] == [ + assert state.attributes[ATTR_RGB_COLOR] == (45, 55, 65) + assert state.attributes[ATTR_BRIGHTNESS] == 48 + assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ ColorMode.COLOR_TEMP, ColorMode.RGB, ] assert ( - attributes[ATTR_SUPPORTED_FEATURES] + state.attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.EFFECT | LightEntityFeature.TRANSITION ) - assert len(attributes[ATTR_EFFECT_LIST]) == 4 - assert attributes[ATTR_EFFECT] == "Off" + assert len(state.attributes[ATTR_EFFECT_LIST]) == 4 + assert state.attributes[ATTR_EFFECT] == "Off" # Turn off mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.reset_mock() @@ -181,7 +176,7 @@ async def test_block_device_rgb_bulb( mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.assert_called_once_with( turn="off" ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF # Turn on, RGB = [70, 80, 90], brightness = 33, effect = Flash @@ -200,13 +195,12 @@ async def test_block_device_rgb_bulb( mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.assert_called_once_with( turn="on", gain=13, brightness=13, red=70, green=80, blue=90, effect=3 ) - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_COLOR_MODE] == ColorMode.RGB - assert attributes[ATTR_RGB_COLOR] == (70, 80, 90) - assert attributes[ATTR_BRIGHTNESS] == 33 - assert attributes[ATTR_EFFECT] == "Flash" + assert state.attributes[ATTR_COLOR_MODE] == ColorMode.RGB + assert state.attributes[ATTR_RGB_COLOR] == (70, 80, 90) + assert state.attributes[ATTR_BRIGHTNESS] == 33 + assert state.attributes[ATTR_EFFECT] == "Flash" # Turn on, COLOR_TEMP_KELVIN = 3500 mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.reset_mock() @@ -219,11 +213,10 @@ async def test_block_device_rgb_bulb( mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.assert_called_once_with( turn="on", temp=3500, mode="white" ) - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_COLOR_MODE] == ColorMode.COLOR_TEMP - assert attributes[ATTR_COLOR_TEMP_KELVIN] == 3500 + assert 
state.attributes[ATTR_COLOR_MODE] == ColorMode.COLOR_TEMP + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 3500 # Turn on with unsupported effect mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.reset_mock() @@ -236,14 +229,13 @@ async def test_block_device_rgb_bulb( mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.assert_called_once_with( turn="on", mode="color" ) - state = hass.states.get(entity_id) - attributes = state.attributes + + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_EFFECT] == "Off" + assert state.attributes[ATTR_EFFECT] == "Off" assert "Effect 'Breath' not supported" in caplog.text - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-light_1" @@ -272,12 +264,11 @@ async def test_block_device_white_bulb( await init_integration(hass, 1, model=MODEL_VINTAGE_V2) # Test initial - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_BRIGHTNESS] == 128 - assert attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.BRIGHTNESS] - assert attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.TRANSITION + assert state.attributes[ATTR_BRIGHTNESS] == 128 + assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.BRIGHTNESS] + assert state.attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.TRANSITION # Turn off mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.reset_mock() @@ -290,7 +281,7 @@ async def test_block_device_white_bulb( mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.assert_called_once_with( turn="off" ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF # Turn on, brightness = 33 @@ -304,13 +295,11 @@ async def test_block_device_white_bulb( mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.assert_called_once_with( turn="on", gain=13, brightness=13 ) - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_BRIGHTNESS] == 33 + assert state.attributes[ATTR_BRIGHTNESS] == 33 - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-light_1" @@ -343,9 +332,8 @@ async def test_block_device_support_transition( await init_integration(hass, 1, model=model) # Test initial - state = hass.states.get(entity_id) - attributes = state.attributes - assert attributes[ATTR_SUPPORTED_FEATURES] & LightEntityFeature.TRANSITION + assert (state := hass.states.get(entity_id)) + assert state.attributes[ATTR_SUPPORTED_FEATURES] & LightEntityFeature.TRANSITION # Turn on, TRANSITION = 4 mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.reset_mock() @@ -358,7 +346,7 @@ async def test_block_device_support_transition( mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.assert_called_once_with( turn="on", transition=4000 ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON # Turn off, TRANSITION = 6, limit to 5000ms @@ -372,11 +360,10 @@ async def test_block_device_support_transition( mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.assert_called_once_with( turn="off", transition=5000 ) - state = hass.states.get(entity_id) + assert (state := 
hass.states.get(entity_id)) assert state.state == STATE_OFF - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-light_1" @@ -403,14 +390,14 @@ async def test_block_device_relay_app_type_light( mock_block_device.blocks[RELAY_BLOCK_ID], "description", "relay_1" ) await init_integration(hass, 1) + assert hass.states.get("switch.test_name_channel_1") is None # Test initial - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.ONOFF] - assert attributes[ATTR_SUPPORTED_FEATURES] == 0 + assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.ONOFF] + assert state.attributes[ATTR_SUPPORTED_FEATURES] == 0 # Turn off mock_block_device.blocks[RELAY_BLOCK_ID].set_state.reset_mock() @@ -423,7 +410,7 @@ async def test_block_device_relay_app_type_light( mock_block_device.blocks[RELAY_BLOCK_ID].set_state.assert_called_once_with( turn="off" ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF # Turn on @@ -437,11 +424,10 @@ async def test_block_device_relay_app_type_light( mock_block_device.blocks[RELAY_BLOCK_ID].set_state.assert_called_once_with( turn="on" ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-relay_1" @@ -451,6 +437,7 @@ async def test_block_device_no_light_blocks( """Test block device without light blocks.""" monkeypatch.setattr(mock_block_device.blocks[LIGHT_BLOCK_ID], "type", "roller") await init_integration(hass, 1) + assert hass.states.get("light.test_name_channel_1") is None @@ -473,7 +460,9 @@ async def test_rpc_device_switch_type_lights_mode( {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - assert hass.states.get(entity_id).state == STATE_ON + + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON mutate_rpc_device_status(monkeypatch, mock_rpc_device, "switch:0", "output", False) await hass.services.async_call( @@ -483,10 +472,11 @@ async def test_rpc_device_switch_type_lights_mode( blocking=True, ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == STATE_OFF - entry = entity_registry.async_get(entity_id) - assert entry + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF + + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-switch:0" @@ -510,7 +500,8 @@ async def test_rpc_light( ) mock_rpc_device.call_rpc.assert_called_once_with("Light.Set", {"id": 0, "on": True}) - state = hass.states.get(entity_id) + + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_BRIGHTNESS] == 135 @@ -528,7 +519,8 @@ async def test_rpc_light( mock_rpc_device.call_rpc.assert_called_once_with( "Light.Set", {"id": 0, "on": False} ) - state = hass.states.get(entity_id) + + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF # Turn on, brightness = 33 @@ -547,7 +539,8 @@ async def test_rpc_light( mock_rpc_device.call_rpc.assert_called_once_with( "Light.Set", {"id": 0, "on": True, "brightness": 13} ) - state = hass.states.get(entity_id) + + assert (state := 
hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_BRIGHTNESS] == 33 @@ -565,7 +558,8 @@ async def test_rpc_light( mock_rpc_device.call_rpc.assert_called_once_with( "Light.Set", {"id": 0, "on": True, "transition_duration": 10.1} ) - state = hass.states.get(entity_id) + + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON # Turn off, transition = 0.4, should be limited to 0.5 @@ -584,11 +578,10 @@ async def test_rpc_light( "Light.Set", {"id": 0, "on": False, "transition_duration": 0.5} ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-light:0" @@ -606,12 +599,11 @@ async def test_rpc_device_rgb_profile( await init_integration(hass, 2) # Test initial - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_RGB_COLOR] == (45, 55, 65) - assert attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.RGB] - assert attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.TRANSITION + assert state.attributes[ATTR_RGB_COLOR] == (45, 55, 65) + assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.RGB] + assert state.attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.TRANSITION # Turn on, RGB = [70, 80, 90] await hass.services.async_call( @@ -628,14 +620,12 @@ async def test_rpc_device_rgb_profile( "RGB.Set", {"id": 0, "on": True, "rgb": [70, 80, 90]} ) - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_COLOR_MODE] == ColorMode.RGB - assert attributes[ATTR_RGB_COLOR] == (70, 80, 90) + assert state.attributes[ATTR_COLOR_MODE] == ColorMode.RGB + assert state.attributes[ATTR_RGB_COLOR] == (70, 80, 90) - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-rgb:0" @@ -653,12 +643,11 @@ async def test_rpc_device_rgbw_profile( await init_integration(hass, 2) # Test initial - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_RGBW_COLOR] == (21, 22, 23, 120) - assert attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.RGBW] - assert attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.TRANSITION + assert state.attributes[ATTR_RGBW_COLOR] == (21, 22, 23, 120) + assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.RGBW] + assert state.attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.TRANSITION # Turn on, RGBW = [72, 82, 92, 128] await hass.services.async_call( @@ -678,14 +667,12 @@ async def test_rpc_device_rgbw_profile( "RGBW.Set", {"id": 0, "on": True, "rgb": [72, 82, 92], "white": 128} ) - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_COLOR_MODE] == ColorMode.RGBW - assert attributes[ATTR_RGBW_COLOR] == (72, 82, 92, 128) + assert state.attributes[ATTR_COLOR_MODE] == ColorMode.RGBW + assert state.attributes[ATTR_RGBW_COLOR] == (72, 82, 92, 128) - entry = entity_registry.async_get(entity_id) - assert entry + 
assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-rgbw:0" @@ -730,9 +717,11 @@ async def test_rpc_rgbw_device_light_mode_remove_others( # verify we have 4 lights for i in range(SHELLY_PLUS_RGBW_CHANNELS): entity_id = f"light.test_light_{i}" - assert hass.states.get(entity_id).state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry + + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON + + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == f"123456789ABC-light:{i}" # verify RGB & RGBW entities removed @@ -793,9 +782,11 @@ async def test_rpc_rgbw_device_rgb_w_modes_remove_others( # verify we have RGB/w light entity_id = f"light.test_{active_mode}_0" - assert hass.states.get(entity_id).state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry + + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON + + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == f"123456789ABC-{active_mode}:0" # verify light & RGB/W entities removed @@ -823,8 +814,7 @@ async def test_rpc_cct_light( await init_integration(hass, 2) - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-cct:0" # Turn off @@ -836,7 +826,8 @@ async def test_rpc_cct_light( ) mock_rpc_device.call_rpc.assert_called_once_with("CCT.Set", {"id": 0, "on": False}) - state = hass.states.get(entity_id) + + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF # Turn on @@ -851,7 +842,8 @@ async def test_rpc_cct_light( mock_rpc_device.mock_update() mock_rpc_device.call_rpc.assert_called_once_with("CCT.Set", {"id": 0, "on": True}) - state = hass.states.get(entity_id) + + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_COLOR_MODE] == ColorMode.COLOR_TEMP assert state.attributes[ATTR_BRIGHTNESS] == 196 # 77% of 255 @@ -874,7 +866,8 @@ async def test_rpc_cct_light( mock_rpc_device.call_rpc.assert_called_once_with( "CCT.Set", {"id": 0, "on": True, "brightness": 88} ) - state = hass.states.get(entity_id) + + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_BRIGHTNESS] == 224 # 88% of 255 @@ -894,7 +887,8 @@ async def test_rpc_cct_light( mock_rpc_device.call_rpc.assert_called_once_with( "CCT.Set", {"id": 0, "on": True, "ct": 4444} ) - state = hass.states.get(entity_id) + + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 4444 diff --git a/tests/components/shelly/test_number.py b/tests/components/shelly/test_number.py index 6bddd1eeb23..41002917d86 100644 --- a/tests/components/shelly/test_number.py +++ b/tests/components/shelly/test_number.py @@ -3,7 +3,7 @@ from copy import deepcopy from unittest.mock import AsyncMock, Mock -from aioshelly.const import MODEL_BLU_GATEWAY_G3 +from aioshelly.const import BLU_TRV_TIMEOUT, MODEL_BLU_GATEWAY_G3 from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError import pytest from syrupy import SnapshotAssertion @@ -18,7 +18,7 @@ from homeassistant.components.number import ( SERVICE_SET_VALUE, NumberMode, ) -from homeassistant.components.shelly.const import BLU_TRV_TIMEOUT, DOMAIN +from homeassistant.components.shelly.const import DOMAIN from homeassistant.config_entries import SOURCE_REAUTH, 
ConfigEntryState from homeassistant.const import ATTR_ENTITY_ID, ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN from homeassistant.core import HomeAssistant, State @@ -54,15 +54,16 @@ async def test_block_number_update( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == "50" + assert (state := hass.states.get(entity_id)) + assert state.state == "50" monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "valvePos", 30) mock_block_device.mock_update() - assert hass.states.get(entity_id).state == "30" + assert (state := hass.states.get(entity_id)) + assert state.state == "30" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-device_0-valvePos" @@ -103,14 +104,16 @@ async def test_block_restored_number( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == "40" + assert (state := hass.states.get(entity_id)) + assert state.state == "40" # Make device online monkeypatch.setattr(mock_block_device, "initialized", True) mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == "50" + assert (state := hass.states.get(entity_id)) + assert state.state == "50" async def test_block_restored_number_no_last_state( @@ -141,14 +144,16 @@ async def test_block_restored_number_no_last_state( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_UNKNOWN + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNKNOWN # Make device online monkeypatch.setattr(mock_block_device, "initialized", True) mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == "50" + assert (state := hass.states.get(entity_id)) + assert state.state == "50" async def test_block_number_set_value( @@ -200,7 +205,10 @@ async def test_block_set_value_connection_error( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - with pytest.raises(HomeAssistantError): + with pytest.raises( + HomeAssistantError, + match="Device communication error occurred while calling action for number.test_name_valve_position of Test name", + ): await hass.services.async_call( NUMBER_DOMAIN, SERVICE_SET_VALUE, @@ -302,8 +310,7 @@ async def test_rpc_device_virtual_number( await init_integration(hass, 3) - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "12.3" assert state.attributes.get(ATTR_MIN) == 0 assert state.attributes.get(ATTR_MAX) == 100 @@ -311,13 +318,13 @@ async def test_rpc_device_virtual_number( assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == expected_unit assert state.attributes.get(ATTR_MODE) is mode - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-number:203-number" monkeypatch.setitem(mock_rpc_device.status["number:203"], "value", 78.9) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "78.9" + assert (state := hass.states.get(entity_id)) + assert state.state == "78.9" monkeypatch.setitem(mock_rpc_device.status["number:203"], "value", 56.7) await 
hass.services.async_call( @@ -327,7 +334,8 @@ async def test_rpc_device_virtual_number( blocking=True, ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "56.7" + assert (state := hass.states.get(entity_id)) + assert state.state == "56.7" async def test_rpc_remove_virtual_number_when_mode_label( @@ -365,8 +373,7 @@ async def test_rpc_remove_virtual_number_when_mode_label( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None async def test_rpc_remove_virtual_number_when_orphaned( @@ -390,8 +397,7 @@ async def test_rpc_remove_virtual_number_when_orphaned( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None async def test_blu_trv_number_entity( @@ -427,7 +433,8 @@ async def test_blu_trv_ext_temp_set_value( # After HA start the state should be unknown because there was no previous external # temperature report - assert hass.states.get(entity_id).state is STATE_UNKNOWN + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNKNOWN await hass.services.async_call( NUMBER_DOMAIN, @@ -449,7 +456,8 @@ async def test_blu_trv_ext_temp_set_value( BLU_TRV_TIMEOUT, ) - assert hass.states.get(entity_id).state == "22.2" + assert (state := hass.states.get(entity_id)) + assert state.state == "22.2" async def test_blu_trv_valve_pos_set_value( @@ -465,7 +473,8 @@ async def test_blu_trv_valve_pos_set_value( entity_id = f"{NUMBER_DOMAIN}.trv_name_valve_position" - assert hass.states.get(entity_id).state == "0" + assert (state := hass.states.get(entity_id)) + assert state.state == "0" monkeypatch.setitem(mock_blu_trv.status["blutrv:200"], "pos", 20) await hass.services.async_call( @@ -490,4 +499,5 @@ async def test_blu_trv_valve_pos_set_value( # device only accepts int for 'pos' value assert isinstance(mock_blu_trv.call_rpc.call_args[0][1]["params"]["pos"], int) - assert hass.states.get(entity_id).state == "20" + assert (state := hass.states.get(entity_id)) + assert state.state == "20" diff --git a/tests/components/shelly/test_select.py b/tests/components/shelly/test_select.py index 0a6eb2a5843..39e426baa58 100644 --- a/tests/components/shelly/test_select.py +++ b/tests/components/shelly/test_select.py @@ -56,8 +56,7 @@ async def test_rpc_device_virtual_enum( await init_integration(hass, 3) - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == expected_state assert state.attributes.get(ATTR_OPTIONS) == [ "Title 1", @@ -65,13 +64,14 @@ async def test_rpc_device_virtual_enum( "option 3", ] - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-enum:203-enum" monkeypatch.setitem(mock_rpc_device.status["enum:203"], "value", "option 2") mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "option 2" + + assert (state := hass.states.get(entity_id)) + assert state.state == "option 2" monkeypatch.setitem(mock_rpc_device.status["enum:203"], "value", "option 1") await hass.services.async_call( @@ -83,7 +83,9 @@ async def test_rpc_device_virtual_enum( # 'Title 1' corresponds to 'option 1' assert mock_rpc_device.call_rpc.call_args[0][1] == {"id": 203, "value": "option 
1"} mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "Title 1" + + assert (state := hass.states.get(entity_id)) + assert state.state == "Title 1" async def test_rpc_remove_virtual_enum_when_mode_label( @@ -122,8 +124,7 @@ async def test_rpc_remove_virtual_enum_when_mode_label( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None async def test_rpc_remove_virtual_enum_when_orphaned( @@ -147,5 +148,4 @@ async def test_rpc_remove_virtual_enum_when_orphaned( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None diff --git a/tests/components/shelly/test_sensor.py b/tests/components/shelly/test_sensor.py index d0fec65c7de..7edd38a4b31 100644 --- a/tests/components/shelly/test_sensor.py +++ b/tests/components/shelly/test_sensor.py @@ -40,7 +40,6 @@ from homeassistant.helpers.entity_registry import EntityRegistry from homeassistant.setup import async_setup_component from . import ( - get_entity_state, init_integration, mock_polling_rpc_update, mock_rest_update, @@ -66,15 +65,16 @@ async def test_block_sensor( entity_id = f"{SENSOR_DOMAIN}.test_name_channel_1_power" await init_integration(hass, 1) - assert hass.states.get(entity_id).state == "53.4" + assert (state := hass.states.get(entity_id)) + assert state.state == "53.4" monkeypatch.setattr(mock_block_device.blocks[RELAY_BLOCK_ID], "power", 60.1) mock_block_device.mock_update() - assert hass.states.get(entity_id).state == "60.1" + assert (state := hass.states.get(entity_id)) + assert state.state == "60.1" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-relay_0-power" @@ -85,14 +85,13 @@ async def test_energy_sensor( entity_id = f"{SENSOR_DOMAIN}.test_name_channel_1_energy" await init_integration(hass, 1) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) # 1234567.89 Wmin / 60 / 1000 = 20.5761315 kWh assert state.state == "20.5761315" # suggested unit is KWh assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.KILO_WATT_HOUR - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-relay_0-energy" @@ -111,13 +110,12 @@ async def test_power_factory_unit_migration( entity_id = f"{SENSOR_DOMAIN}.test_name_power_factor" await init_integration(hass, 1) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) # Value of 0.98 is converted to 98.0% assert state.state == "98.0" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-emeter_0-powerFactor" @@ -128,12 +126,11 @@ async def test_power_factory_without_unit_migration( entity_id = f"{SENSOR_DOMAIN}.test_name_power_factor" await init_integration(hass, 1) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == "0.98" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None - entry = entity_registry.async_get(entity_id) - assert entry + assert 
(entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-emeter_0-powerFactor" @@ -147,12 +144,14 @@ async def test_block_rest_sensor( entity_id = register_entity(hass, SENSOR_DOMAIN, "test_name_rssi", "rssi") await init_integration(hass, 1) - assert hass.states.get(entity_id).state == "-64" + assert (state := hass.states.get(entity_id)) + assert state.state == "-64" monkeypatch.setitem(mock_block_device.status["wifi_sta"], "rssi", -71) await mock_rest_update(hass, freezer) - assert hass.states.get(entity_id).state == "-71" + assert (state := hass.states.get(entity_id)) + assert state.state == "-71" async def test_block_sleeping_sensor( @@ -175,15 +174,16 @@ async def test_block_sleeping_sensor( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == "22.1" + assert (state := hass.states.get(entity_id)) + assert state.state == "22.1" monkeypatch.setattr(mock_block_device.blocks[SENSOR_BLOCK_ID], "temp", 23.4) mock_block_device.mock_update() - assert hass.states.get(entity_id).state == "23.4" + assert (state := hass.states.get(entity_id)) + assert state.state == "23.4" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-sensor_0-temp" @@ -211,8 +211,7 @@ async def test_block_restored_sleeping_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "20.4" assert state.attributes[ATTR_STATE_CLASS] == SensorStateClass.MEASUREMENT assert state.attributes[ATTR_DEVICE_CLASS] == SensorDeviceClass.TEMPERATURE @@ -222,7 +221,8 @@ async def test_block_restored_sleeping_sensor( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == "22.1" + assert (state := hass.states.get(entity_id)) + assert state.state == "22.1" async def test_block_restored_sleeping_sensor_no_last_state( @@ -246,14 +246,16 @@ async def test_block_restored_sleeping_sensor_no_last_state( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_UNKNOWN + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNKNOWN # Make device online monkeypatch.setattr(mock_block_device, "initialized", True) mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == "22.1" + assert (state := hass.states.get(entity_id)) + assert state.state == "22.1" async def test_block_sensor_error( @@ -266,15 +268,16 @@ async def test_block_sensor_error( entity_id = f"{SENSOR_DOMAIN}.test_name_battery" await init_integration(hass, 1) - assert hass.states.get(entity_id).state == "98" + assert (state := hass.states.get(entity_id)) + assert state.state == "98" monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "battery", -1) mock_block_device.mock_update() - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-device_0-battery" @@ -321,7 +324,8 @@ async def 
test_block_not_matched_restored_sleeping_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == "20.4" + assert (state := hass.states.get(entity_id)) + assert state.state == "20.4" # Make device online monkeypatch.setattr( @@ -331,7 +335,8 @@ async def test_block_not_matched_restored_sleeping_sensor( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == "20.4" + assert (state := hass.states.get(entity_id)) + assert state.state == "20.4" async def test_block_sensor_without_value( @@ -345,17 +350,94 @@ async def test_block_sensor_without_value( assert hass.states.get(entity_id) is None -async def test_block_sensor_unknown_value( - hass: HomeAssistant, mock_block_device: Mock, monkeypatch: pytest.MonkeyPatch +@pytest.mark.parametrize( + ("entity", "initial_state", "block_id", "attribute", "value", "final_value"), + [ + ("test_name_battery", "98", DEVICE_BLOCK_ID, "battery", None, STATE_UNKNOWN), + ( + "test_name_operation", + "normal", + SENSOR_BLOCK_ID, + "sensorOp", + None, + STATE_UNKNOWN, + ), + ( + "test_name_operation", + "normal", + SENSOR_BLOCK_ID, + "sensorOp", + "normal", + "normal", + ), + ( + "test_name_self_test", + "pending", + SENSOR_BLOCK_ID, + "selfTest", + "completed", + "completed", + ), + ( + "test_name_gas_detected", + "mild", + SENSOR_BLOCK_ID, + "gas", + "heavy", + "heavy", + ), + ], +) +async def test_block_sensor_values( + hass: HomeAssistant, + mock_block_device: Mock, + monkeypatch: pytest.MonkeyPatch, + entity: str, + initial_state: str, + block_id: int, + attribute: str, + value: str | None, + final_value: str, ) -> None: """Test block sensor unknown value.""" - entity_id = f"{SENSOR_DOMAIN}.test_name_battery" + entity_id = f"{SENSOR_DOMAIN}.{entity}" await init_integration(hass, 1) - monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "battery", None) + assert hass.states.get(entity_id).state == initial_state + + monkeypatch.setattr(mock_block_device.blocks[block_id], attribute, value) mock_block_device.mock_update() - assert hass.states.get(entity_id).state == STATE_UNKNOWN + assert (state := hass.states.get(entity_id)) + assert state.state == final_value + + +@pytest.mark.parametrize( + ("lamp_life_seconds", "percentage"), + [ + (0 * 3600, "100.0"), # 0 hours, 100% remaining + (16 * 3600, "99.8222222222222"), + (4500 * 3600, "50.0"), # 4500 hours, 50% remaining + (9000 * 3600, "0.0"), # 9000 hours, 0% remaining + (10000 * 3600, "0.0"), # > 9000 hours, 0% remaining + ], +) +async def test_block_shelly_air_lamp_life( + hass: HomeAssistant, + mock_block_device: Mock, + monkeypatch: pytest.MonkeyPatch, + lamp_life_seconds: int, + percentage: float, +) -> None: + """Test block Shelly Air lamp life percentage sensor.""" + entity_id = f"{SENSOR_DOMAIN}.{'test_name_channel_1_lamp_life'}" + monkeypatch.setattr( + mock_block_device.blocks[RELAY_BLOCK_ID], "totalWorkTime", lamp_life_seconds + ) + await init_integration(hass, 1) + + assert (state := hass.states.get(entity_id)) + assert state.state == percentage async def test_rpc_sensor( @@ -365,17 +447,20 @@ async def test_rpc_sensor( entity_id = f"{SENSOR_DOMAIN}.test_cover_0_power" await init_integration(hass, 2) - assert hass.states.get(entity_id).state == "85.3" + assert (state := hass.states.get(entity_id)) + assert state.state == "85.3" mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "apower", "88.2") 
mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "88.2" + assert (state := hass.states.get(entity_id)) + assert state.state == "88.2" mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "apower", None) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == STATE_UNKNOWN + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNKNOWN @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -389,7 +474,8 @@ async def test_rpc_rssi_sensor_removal( entry = await init_integration(hass, 2) # WiFi1 enabled, do not remove sensor - assert get_entity_state(hass, entity_id) == "-63" + assert (state := hass.states.get(entity_id)) + assert state.state == "-63" # WiFi1 & WiFi2 disabled - remove sensor monkeypatch.setitem(mock_rpc_device.config["wifi"]["sta"], "enable", False) @@ -401,7 +487,9 @@ async def test_rpc_rssi_sensor_removal( monkeypatch.setitem(mock_rpc_device.config["wifi"]["sta1"], "enable", True) await hass.config_entries.async_reload(entry.entry_id) await hass.async_block_till_done() - assert get_entity_state(hass, entity_id) == "-63" + + assert (state := hass.states.get(entity_id)) + assert state.state == "-63" async def test_rpc_illuminance_sensor( @@ -411,10 +499,10 @@ async def test_rpc_illuminance_sensor( entity_id = f"{SENSOR_DOMAIN}.test_name_illuminance" await init_integration(hass, 2) - assert hass.states.get(entity_id).state == "345" + assert (state := hass.states.get(entity_id)) + assert state.state == "345" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-illuminance:0-illuminance" @@ -428,17 +516,18 @@ async def test_rpc_sensor_error( entity_id = f"{SENSOR_DOMAIN}.test_name_voltmeter" await init_integration(hass, 2) - assert hass.states.get(entity_id).state == "4.321" + assert (state := hass.states.get(entity_id)) + assert state.state == "4.321" mutate_rpc_device_status( monkeypatch, mock_rpc_device, "voltmeter:100", "voltage", None ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-voltmeter:100-voltmeter" @@ -453,15 +542,16 @@ async def test_rpc_polling_sensor( entity_id = register_entity(hass, SENSOR_DOMAIN, "test_name_rssi", "wifi-rssi") await init_integration(hass, 2) - assert hass.states.get(entity_id).state == "-63" + assert (state := hass.states.get(entity_id)) + assert state.state == "-63" mutate_rpc_device_status(monkeypatch, mock_rpc_device, "wifi", "rssi", "-70") await mock_polling_rpc_update(hass, freezer) - assert hass.states.get(entity_id).state == "-70" + assert (state := hass.states.get(entity_id)) + assert state.state == "-70" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-wifi-rssi" @@ -492,12 +582,14 @@ async def test_rpc_sleeping_sensor( mock_rpc_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == "22.9" + assert (state := hass.states.get(entity_id)) + assert state.state == "22.9" mutate_rpc_device_status(monkeypatch, mock_rpc_device, "temperature:0", "tC", 23.4) 
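These RPC hunks keep the same driving pattern: mutate one value inside the mocked device's status dict, call mock_update() to notify the coordinator, then assert the new state with the walrus form shown above. A simplified, self-contained sketch of that flow, where SimpleNamespace stands in for the real mock_rpc_device fixture and the helper mirrors mutate_rpc_device_status (names here are illustrative, not the integration's code):

from types import SimpleNamespace

import pytest


def mutate_status(
    monkeypatch: pytest.MonkeyPatch,
    device: SimpleNamespace,
    key: str,
    attr: str,
    value: object,
) -> None:
    """Patch one attribute inside the device status dict, like mutate_rpc_device_status."""
    monkeypatch.setitem(device.status[key], attr, value)


def test_mutate_then_update(monkeypatch: pytest.MonkeyPatch) -> None:
    device = SimpleNamespace(status={"temperature:0": {"tC": 22.9}})
    mutate_status(monkeypatch, device, "temperature:0", "tC", 23.4)
    # In the real tests, device.mock_update() would now push the change to listeners;
    # here we only show that the patched value is what a listener would read.
    assert device.status["temperature:0"]["tC"] == 23.4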
mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "23.4" + assert (state := hass.states.get(entity_id)) + assert state.state == "23.4" async def test_rpc_restored_sleeping_sensor( @@ -525,7 +617,8 @@ async def test_rpc_restored_sleeping_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == "21.0" + assert (state := hass.states.get(entity_id)) + assert state.state == "21.0" # Make device online monkeypatch.setattr(mock_rpc_device, "initialized", True) @@ -536,7 +629,8 @@ async def test_rpc_restored_sleeping_sensor( mock_rpc_device.mock_update() await hass.async_block_till_done() - assert hass.states.get(entity_id).state == "22.9" + assert (state := hass.states.get(entity_id)) + assert state.state == "22.9" async def test_rpc_restored_sleeping_sensor_no_last_state( @@ -562,7 +656,8 @@ async def test_rpc_restored_sleeping_sensor_no_last_state( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_UNKNOWN + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNKNOWN # Make device online monkeypatch.setattr(mock_rpc_device, "initialized", True) @@ -573,7 +668,8 @@ async def test_rpc_restored_sleeping_sensor_no_last_state( mock_rpc_device.mock_update() await hass.async_block_till_done() - assert hass.states.get(entity_id).state == "22.9" + assert (state := hass.states.get(entity_id)) + assert state.state == "22.9" @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -583,36 +679,32 @@ async def test_rpc_em1_sensors( """Test RPC sensors for EM1 component.""" await init_integration(hass, 2) - state = hass.states.get("sensor.test_name_em0_power") - assert state + assert (state := hass.states.get("sensor.test_name_em0_power")) assert state.state == "85.3" - entry = entity_registry.async_get("sensor.test_name_em0_power") - assert entry + assert (entry := entity_registry.async_get("sensor.test_name_em0_power")) assert entry.unique_id == "123456789ABC-em1:0-power_em1" - state = hass.states.get("sensor.test_name_em1_power") - assert state + assert (state := hass.states.get("sensor.test_name_em1_power")) assert state.state == "123.3" - entry = entity_registry.async_get("sensor.test_name_em1_power") - assert entry + assert (entry := entity_registry.async_get("sensor.test_name_em1_power")) assert entry.unique_id == "123456789ABC-em1:1-power_em1" - state = hass.states.get("sensor.test_name_em0_total_active_energy") - assert state + assert (state := hass.states.get("sensor.test_name_em0_total_active_energy")) assert state.state == "123.4564" - entry = entity_registry.async_get("sensor.test_name_em0_total_active_energy") - assert entry + assert ( + entry := entity_registry.async_get("sensor.test_name_em0_total_active_energy") + ) assert entry.unique_id == "123456789ABC-em1data:0-total_act_energy" - state = hass.states.get("sensor.test_name_em1_total_active_energy") - assert state + assert (state := hass.states.get("sensor.test_name_em1_total_active_energy")) assert state.state == "987.6543" - entry = entity_registry.async_get("sensor.test_name_em1_total_active_energy") - assert entry + assert ( + entry := entity_registry.async_get("sensor.test_name_em1_total_active_energy") + ) assert entry.unique_id == "123456789ABC-em1data:1-total_act_energy" @@ -638,7 +730,7 @@ async def test_rpc_sleeping_update_entity_service( mock_rpc_device.mock_online() await 
hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == "22.9" await hass.services.async_call( @@ -649,11 +741,10 @@ async def test_rpc_sleeping_update_entity_service( ) # Entity should be available after update_entity service call - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == "22.9" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-temperature:0-temperature_0" assert ( @@ -687,7 +778,8 @@ async def test_block_sleeping_update_entity_service( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == "22.1" + assert (state := hass.states.get(entity_id)) + assert state.state == "22.1" await hass.services.async_call( HA_DOMAIN, @@ -697,11 +789,10 @@ async def test_block_sleeping_update_entity_service( ) # Entity should be available after update_entity service call - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == "22.1" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-sensor_0-temp" assert ( @@ -734,20 +825,18 @@ async def test_rpc_analog_input_sensors( await init_integration(hass, 2) entity_id = f"{SENSOR_DOMAIN}.test_name_input_1_analog" - assert hass.states.get(entity_id).state == "89" + assert (state := hass.states.get(entity_id)) + assert state.state == "89" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-input:1-analoginput" entity_id = f"{SENSOR_DOMAIN}.test_name_input_1_analog_value" - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "8.9" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == expected_unit - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-input:1-analoginput_xpercent" @@ -782,7 +871,8 @@ async def test_rpc_disabled_xpercent( await init_integration(hass, 2) entity_id = f"{SENSOR_DOMAIN}.test_name_input_1_analog" - assert hass.states.get(entity_id).state == "89" + assert (state := hass.states.get(entity_id)) + assert state.state == "89" entity_id = f"{SENSOR_DOMAIN}.test_name_input_1_analog_value" assert hass.states.get(entity_id) is None @@ -812,23 +902,20 @@ async def test_rpc_pulse_counter_sensors( await init_integration(hass, 2) entity_id = f"{SENSOR_DOMAIN}.gas_pulse_counter" - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == "56174" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "pulse" assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.TOTAL - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-input:2-pulse_counter" entity_id = f"{SENSOR_DOMAIN}.gas_counter_value" - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "561.74" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == expected_unit - entry = 
entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-input:2-counter_value" @@ -863,7 +950,8 @@ async def test_rpc_disabled_xtotal_counter( await init_integration(hass, 2) entity_id = f"{SENSOR_DOMAIN}.gas_pulse_counter" - assert hass.states.get(entity_id).state == "20635" + assert (state := hass.states.get(entity_id)) + assert state.state == "20635" entity_id = f"{SENSOR_DOMAIN}.gas_counter_value" assert hass.states.get(entity_id) is None @@ -893,23 +981,20 @@ async def test_rpc_pulse_counter_frequency_sensors( await init_integration(hass, 2) entity_id = f"{SENSOR_DOMAIN}.gas_pulse_counter_frequency" - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == "208.0" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfFrequency.HERTZ assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-input:2-counter_frequency" entity_id = f"{SENSOR_DOMAIN}.gas_pulse_counter_frequency_value" - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "6.11" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == expected_unit - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-input:2-counter_frequency_value" @@ -932,11 +1017,9 @@ async def test_rpc_disabled_xfreq( entity_id = f"{SENSOR_DOMAIN}.gas_pulse_counter_frequency_value" - state = hass.states.get(entity_id) - assert not state + assert hass.states.get(entity_id) is None - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None @pytest.mark.parametrize( @@ -968,17 +1051,16 @@ async def test_rpc_device_virtual_text_sensor( await init_integration(hass, 3) - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "lorem ipsum" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-text:203-text" monkeypatch.setitem(mock_rpc_device.status["text:203"], "value", "dolor sit amet") mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "dolor sit amet" + assert (state := hass.states.get(entity_id)) + assert state.state == "dolor sit amet" async def test_rpc_remove_text_virtual_sensor_when_mode_field( @@ -1011,8 +1093,7 @@ async def test_rpc_remove_text_virtual_sensor_when_mode_field( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None async def test_rpc_remove_text_virtual_sensor_when_orphaned( @@ -1036,8 +1117,7 @@ async def test_rpc_remove_text_virtual_sensor_when_orphaned( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None @pytest.mark.parametrize( @@ -1073,18 +1153,17 @@ async def test_rpc_device_virtual_number_sensor( await init_integration(hass, 3) - state = hass.states.get(entity_id) - 
assert state + assert (state := hass.states.get(entity_id)) assert state.state == "34.5" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == expected_unit - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-number:203-number" monkeypatch.setitem(mock_rpc_device.status["number:203"], "value", 56.7) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "56.7" + assert (state := hass.states.get(entity_id)) + assert state.state == "56.7" async def test_rpc_remove_number_virtual_sensor_when_mode_field( @@ -1122,8 +1201,7 @@ async def test_rpc_remove_number_virtual_sensor_when_mode_field( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None async def test_rpc_remove_number_virtual_sensor_when_orphaned( @@ -1147,8 +1225,7 @@ async def test_rpc_remove_number_virtual_sensor_when_orphaned( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None @pytest.mark.parametrize( @@ -1188,19 +1265,18 @@ async def test_rpc_device_virtual_enum_sensor( await init_integration(hass, 3) - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == expected_state assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENUM assert state.attributes.get(ATTR_OPTIONS) == ["Title 1", "two", "three"] - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-enum:203-enum" monkeypatch.setitem(mock_rpc_device.status["enum:203"], "value", "two") mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "two" + assert (state := hass.states.get(entity_id)) + assert state.state == "two" async def test_rpc_remove_enum_virtual_sensor_when_mode_dropdown( @@ -1242,8 +1318,7 @@ async def test_rpc_remove_enum_virtual_sensor_when_mode_dropdown( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None async def test_rpc_remove_enum_virtual_sensor_when_orphaned( @@ -1267,8 +1342,7 @@ async def test_rpc_remove_enum_virtual_sensor_when_orphaned( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -1299,61 +1373,51 @@ async def test_rpc_rgbw_sensors( entity_id = f"sensor.test_name_{light_type}_light_0_power" - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "12.2" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfPower.WATT - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == f"123456789ABC-{light_type}:0-power_{light_type}" entity_id = f"sensor.test_name_{light_type}_light_0_energy" - state = hass.states.get(entity_id) - assert state + assert (state := 
hass.states.get(entity_id)) assert state.state == "0.045141" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.KILO_WATT_HOUR - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == f"123456789ABC-{light_type}:0-energy_{light_type}" entity_id = f"sensor.test_name_{light_type}_light_0_current" - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "0.23" assert ( state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfElectricCurrent.AMPERE ) - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == f"123456789ABC-{light_type}:0-current_{light_type}" entity_id = f"sensor.test_name_{light_type}_light_0_voltage" - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "12.4" assert ( state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfElectricPotential.VOLT ) - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == f"123456789ABC-{light_type}:0-voltage_{light_type}" entity_id = f"sensor.test_name_{light_type}_light_0_device_temperature" - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "54.3" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == f"123456789ABC-{light_type}:0-temperature_{light_type}" @@ -1366,15 +1430,17 @@ async def test_rpc_device_sensor_goes_unavailable_on_disconnect( ) -> None: """Test RPC device with sensor goes unavailable on disconnect.""" await init_integration(hass, 2) - temp_sensor_state = hass.states.get("sensor.test_name_temperature") - assert temp_sensor_state is not None - assert temp_sensor_state.state != STATE_UNAVAILABLE + + assert (state := hass.states.get("sensor.test_name_temperature")) + assert state.state != STATE_UNAVAILABLE + monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setattr(mock_rpc_device, "initialized", False) mock_rpc_device.mock_disconnected() await hass.async_block_till_done() - temp_sensor_state = hass.states.get("sensor.test_name_temperature") - assert temp_sensor_state.state == STATE_UNAVAILABLE + + assert (state := hass.states.get("sensor.test_name_temperature")) + assert state.state == STATE_UNAVAILABLE freezer.tick(60) async_fire_time_changed(hass) @@ -1385,8 +1451,9 @@ async def test_rpc_device_sensor_goes_unavailable_on_disconnect( monkeypatch.setattr(mock_rpc_device, "initialized", True) mock_rpc_device.mock_initialized() await hass.async_block_till_done() - temp_sensor_state = hass.states.get("sensor.test_name_temperature") - assert temp_sensor_state.state != STATE_UNAVAILABLE + + assert (state := hass.states.get("sensor.test_name_temperature")) + assert state.state != STATE_UNAVAILABLE async def test_rpc_voltmeter_value( @@ -1399,13 +1466,11 @@ async def test_rpc_voltmeter_value( await init_integration(hass, 2) - state = hass.states.get(entity_id) - + assert (state := hass.states.get(entity_id)) assert state.state == "12.34" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "ppm" - entry = entity_registry.async_get(entity_id) - assert entry + assert 
(entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-voltmeter:100-voltmeter_value" @@ -1450,8 +1515,7 @@ async def test_rpc_device_virtual_number_sensor_with_device_class( await init_integration(hass, 3) - state = hass.states.get("sensor.test_name_current_humidity") - assert state + assert (state := hass.states.get("sensor.test_name_current_humidity")) assert state.state == "34" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.HUMIDITY diff --git a/tests/components/shelly/test_switch.py b/tests/components/shelly/test_switch.py index 0425f883ad6..824742d1798 100644 --- a/tests/components/shelly/test_switch.py +++ b/tests/components/shelly/test_switch.py @@ -28,7 +28,7 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.device_registry import DeviceRegistry from homeassistant.helpers.entity_registry import EntityRegistry -from . import get_entity_state, init_integration, register_device, register_entity +from . import init_integration, register_device, register_entity from tests.common import mock_restore_cache @@ -42,22 +42,25 @@ async def test_block_device_services( ) -> None: """Test block device turn on/off services.""" await init_integration(hass, 1) + entity_id = "switch.test_name_channel_1" await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "switch.test_name_channel_1"}, + {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - assert hass.states.get("switch.test_name_channel_1").state == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "switch.test_name_channel_1"}, + {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - assert hass.states.get("switch.test_name_channel_1").state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF @pytest.mark.parametrize("model", MOTION_MODELS) @@ -75,7 +78,8 @@ async def test_block_motion_switch( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert get_entity_state(hass, entity_id) == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON # turn off await hass.services.async_call( @@ -88,7 +92,9 @@ async def test_block_motion_switch( mock_block_device.mock_update() mock_block_device.set_shelly_motion_detection.assert_called_once_with(False) - assert get_entity_state(hass, entity_id) == STATE_OFF + + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF # turn on mock_block_device.set_shelly_motion_detection.reset_mock() @@ -102,7 +108,9 @@ async def test_block_motion_switch( mock_block_device.mock_update() mock_block_device.set_shelly_motion_detection.assert_called_once_with(True) - assert get_entity_state(hass, entity_id) == STATE_ON + + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON @pytest.mark.parametrize("model", MOTION_MODELS) @@ -132,14 +140,16 @@ async def test_block_restored_motion_switch( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert get_entity_state(hass, entity_id) == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF # Make device online monkeypatch.setattr(mock_block_device, "initialized", True) mock_block_device.mock_online() await 
hass.async_block_till_done(wait_background_tasks=True) - assert get_entity_state(hass, entity_id) == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON @pytest.mark.parametrize("model", MOTION_MODELS) @@ -167,14 +177,16 @@ async def test_block_restored_motion_switch_no_last_state( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert get_entity_state(hass, entity_id) == STATE_UNKNOWN + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNKNOWN # Make device online monkeypatch.setattr(mock_block_device, "initialized", True) mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert get_entity_state(hass, entity_id) == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON @pytest.mark.parametrize( @@ -205,8 +217,7 @@ async def test_block_device_unique_ids( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - entry = entity_registry.async_get(entity) - assert entry + assert (entry := entity_registry.async_get(entity)) assert entry.unique_id == unique_id @@ -221,7 +232,10 @@ async def test_block_set_state_connection_error( ) await init_integration(hass, 1) - with pytest.raises(HomeAssistantError): + with pytest.raises( + HomeAssistantError, + match="Device communication error occurred while calling action for switch.test_name_channel_1 of Test name", + ): await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_OFF, @@ -270,11 +284,15 @@ async def test_block_device_update( """Test block device update.""" monkeypatch.setattr(mock_block_device.blocks[RELAY_BLOCK_ID], "output", False) await init_integration(hass, 1) - assert hass.states.get("switch.test_name_channel_1").state == STATE_OFF + + entity_id = "switch.test_name_channel_1" + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF monkeypatch.setattr(mock_block_device.blocks[RELAY_BLOCK_ID], "output", True) mock_block_device.mock_update() - assert hass.states.get("switch.test_name_channel_1").state == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON async def test_block_device_no_relay_blocks( @@ -314,23 +332,26 @@ async def test_rpc_device_services( monkeypatch.setitem(mock_rpc_device.status["sys"], "relay_in_thermostat", False) await init_integration(hass, 2) + entity_id = "switch.test_switch_0" await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "switch.test_switch_0"}, + {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - assert hass.states.get("switch.test_switch_0").state == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON monkeypatch.setitem(mock_rpc_device.status["switch:0"], "output", False) await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "switch.test_switch_0"}, + {ATTR_ENTITY_ID: entity_id}, blocking=True, ) mock_rpc_device.mock_update() - assert hass.states.get("switch.test_switch_0").state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF async def test_rpc_device_unique_ids( @@ -344,8 +365,7 @@ async def test_rpc_device_unique_ids( monkeypatch.setitem(mock_rpc_device.status["sys"], "relay_in_thermostat", False) await init_integration(hass, 2) - entry = entity_registry.async_get("switch.test_switch_0") - assert entry + assert (entry := entity_registry.async_get("switch.test_switch_0")) assert 
entry.unique_id == "123456789ABC-switch:0" @@ -357,13 +377,27 @@ async def test_rpc_device_switch_type_lights_mode( mock_rpc_device.config["sys"]["ui_data"], "consumption_types", ["lights"] ) await init_integration(hass, 2) + assert hass.states.get("switch.test_switch_0") is None -@pytest.mark.parametrize("exc", [DeviceConnectionError, RpcCallError(-1, "error")]) +@pytest.mark.parametrize( + ("exc", "error"), + [ + ( + DeviceConnectionError, + "Device communication error occurred while calling action for switch.test_switch_0 of Test name", + ), + ( + RpcCallError(-1, "error"), + "RPC call error occurred while calling action for switch.test_switch_0 of Test name", + ), + ], +) async def test_rpc_set_state_errors( hass: HomeAssistant, exc: Exception, + error: str, mock_rpc_device: Mock, monkeypatch: pytest.MonkeyPatch, ) -> None: @@ -373,7 +407,7 @@ async def test_rpc_set_state_errors( monkeypatch.setitem(mock_rpc_device.status["sys"], "relay_in_thermostat", False) await init_integration(hass, 2) - with pytest.raises(HomeAssistantError): + with pytest.raises(HomeAssistantError, match=error): await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_OFF, @@ -447,7 +481,7 @@ async def test_wall_display_relay_mode( config_entry = await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) - assert hass.states.get(climate_entity_id) is not None + assert (state := hass.states.get(climate_entity_id)) assert len(hass.states.async_entity_ids(CLIMATE_DOMAIN)) == 1 new_status = deepcopy(mock_rpc_device.status) @@ -460,17 +494,16 @@ async def test_wall_display_relay_mode( await hass.async_block_till_done() # the climate entity should be removed + assert hass.states.get(climate_entity_id) is None assert len(hass.states.async_entity_ids(CLIMATE_DOMAIN)) == 0 # the switch entity should be created - state = hass.states.get(switch_entity_id) - assert state + assert (state := hass.states.get(switch_entity_id)) assert state.state == STATE_ON assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 - entry = entity_registry.async_get(switch_entity_id) - assert entry + assert (entry := entity_registry.async_get(switch_entity_id)) assert entry.unique_id == "123456789ABC-switch:0" @@ -503,12 +536,10 @@ async def test_rpc_device_virtual_switch( await init_integration(hass, 3) - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-boolean:200-boolean" monkeypatch.setitem(mock_rpc_device.status["boolean:200"], "value", False) @@ -519,7 +550,8 @@ async def test_rpc_device_virtual_switch( blocking=True, ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF monkeypatch.setitem(mock_rpc_device.status["boolean:200"], "value", True) await hass.services.async_call( @@ -529,7 +561,8 @@ async def test_rpc_device_virtual_switch( blocking=True, ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON async def test_rpc_device_virtual_binary_sensor( @@ -550,8 +583,7 @@ async def test_rpc_device_virtual_binary_sensor( await init_integration(hass, 3) - state = hass.states.get(entity_id) - assert not state + assert hass.states.get(entity_id) is None async def 
test_rpc_remove_virtual_switch_when_mode_label( @@ -584,8 +616,7 @@ async def test_rpc_remove_virtual_switch_when_mode_label( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None async def test_rpc_remove_virtual_switch_when_orphaned( @@ -609,8 +640,7 @@ async def test_rpc_remove_virtual_switch_when_orphaned( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -640,11 +670,10 @@ async def test_rpc_device_script_switch( await init_integration(hass, 3) - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry + + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == f"123456789ABC-{key}-script" monkeypatch.setitem(mock_rpc_device.status[key], "running", False) @@ -655,8 +684,8 @@ async def test_rpc_device_script_switch( blocking=True, ) mock_rpc_device.mock_update() - state = hass.states.get(entity_id) - assert state + + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF monkeypatch.setitem(mock_rpc_device.status[key], "running", True) @@ -667,6 +696,6 @@ async def test_rpc_device_script_switch( blocking=True, ) mock_rpc_device.mock_update() - state = hass.states.get(entity_id) - assert state + + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON diff --git a/tests/components/shelly/test_text.py b/tests/components/shelly/test_text.py index 19acb856f35..a4812cc4160 100644 --- a/tests/components/shelly/test_text.py +++ b/tests/components/shelly/test_text.py @@ -47,17 +47,17 @@ async def test_rpc_device_virtual_text( await init_integration(hass, 3) - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "lorem ipsum" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-text:203-text" monkeypatch.setitem(mock_rpc_device.status["text:203"], "value", "dolor sit amet") mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "dolor sit amet" + + assert (state := hass.states.get(entity_id)) + assert state.state == "dolor sit amet" monkeypatch.setitem(mock_rpc_device.status["text:203"], "value", "sed do eiusmod") await hass.services.async_call( @@ -67,7 +67,9 @@ async def test_rpc_device_virtual_text( blocking=True, ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "sed do eiusmod" + + assert (state := hass.states.get(entity_id)) + assert state.state == "sed do eiusmod" async def test_rpc_remove_virtual_text_when_mode_label( @@ -100,8 +102,7 @@ async def test_rpc_remove_virtual_text_when_mode_label( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None async def test_rpc_remove_virtual_text_when_orphaned( @@ -125,5 +126,4 @@ async def test_rpc_remove_virtual_text_when_orphaned( await 
hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None diff --git a/tests/components/shelly/test_update.py b/tests/components/shelly/test_update.py index 9ea66c1acb7..51016f0cdaa 100644 --- a/tests/components/shelly/test_update.py +++ b/tests/components/shelly/test_update.py @@ -61,14 +61,16 @@ async def test_block_update( monkeypatch.setitem(mock_block_device.status, "cloud", {"connected": False}) await init_integration(hass, 1) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0" assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0" assert state.attributes[ATTR_IN_PROGRESS] is False assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None - supported_feat = state.attributes[ATTR_SUPPORTED_FEATURES] - assert supported_feat == UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS + assert ( + state.attributes[ATTR_SUPPORTED_FEATURES] + == UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS + ) await hass.services.async_call( UPDATE_DOMAIN, @@ -78,7 +80,7 @@ async def test_block_update( ) assert mock_block_device.trigger_ota_update.call_count == 1 - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0" assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0" @@ -89,15 +91,14 @@ async def test_block_update( monkeypatch.setitem(mock_block_device.status["update"], "old_version", "2.0.0") await mock_rest_update(hass, freezer) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF assert state.attributes[ATTR_INSTALLED_VERSION] == "2.0.0" assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0" assert state.attributes[ATTR_IN_PROGRESS] is False assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-fwupdate" @@ -117,7 +118,7 @@ async def test_block_beta_update( monkeypatch.setitem(mock_block_device.status, "cloud", {"connected": False}) await init_integration(hass, 1) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0" assert state.attributes[ATTR_LATEST_VERSION] == "1.0.0" @@ -129,7 +130,7 @@ async def test_block_beta_update( ) await mock_rest_update(hass, freezer) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0" assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0-beta" @@ -145,7 +146,7 @@ async def test_block_beta_update( ) assert mock_block_device.trigger_ota_update.call_count == 1 - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0" assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0-beta" @@ -155,15 +156,14 @@ async def test_block_beta_update( monkeypatch.setitem(mock_block_device.status["update"], "old_version", "2.0.0-beta") await mock_rest_update(hass, freezer) - state = hass.states.get(entity_id) + assert 
(state := hass.states.get(entity_id)) assert state.state == STATE_OFF assert state.attributes[ATTR_INSTALLED_VERSION] == "2.0.0-beta" assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0-beta" assert state.attributes[ATTR_IN_PROGRESS] is False assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-fwupdate_beta" @@ -184,14 +184,16 @@ async def test_block_update_connection_error( ) await init_integration(hass, 1) - with pytest.raises(HomeAssistantError) as excinfo: + with pytest.raises( + HomeAssistantError, + match="Device communication error occurred while triggering OTA update for Test name", + ): await hass.services.async_call( UPDATE_DOMAIN, SERVICE_INSTALL, {ATTR_ENTITY_ID: "update.test_name_firmware"}, blocking=True, ) - assert "Error starting OTA update" in str(excinfo.value) @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -254,11 +256,12 @@ async def test_block_version_compare( monkeypatch.setitem(mock_block_device.status, "cloud", {"connected": False}) await init_integration(hass, 1) - state = hass.states.get(entity_id_latest) + assert (state := hass.states.get(entity_id_latest)) assert state.state == STATE_OFF assert state.attributes[ATTR_INSTALLED_VERSION] == STABLE assert state.attributes[ATTR_LATEST_VERSION] == STABLE - state = hass.states.get(entity_id_beta) + + assert (state := hass.states.get(entity_id_beta)) assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == STABLE assert state.attributes[ATTR_LATEST_VERSION] == BETA @@ -268,11 +271,12 @@ async def test_block_version_compare( monkeypatch.setitem(mock_block_device.status["update"], "beta_version", BETA) await mock_rest_update(hass, freezer) - state = hass.states.get(entity_id_latest) + assert (state := hass.states.get(entity_id_latest)) assert state.state == STATE_OFF assert state.attributes[ATTR_INSTALLED_VERSION] == BETA assert state.attributes[ATTR_LATEST_VERSION] == STABLE - state = hass.states.get(entity_id_beta) + + assert (state := hass.states.get(entity_id_beta)) assert state.state == STATE_OFF assert state.attributes[ATTR_INSTALLED_VERSION] == BETA assert state.attributes[ATTR_LATEST_VERSION] == BETA @@ -296,7 +300,7 @@ async def test_rpc_update( ) await init_integration(hass, 2) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2" @@ -314,7 +318,7 @@ async def test_rpc_update( assert mock_rpc_device.trigger_ota_update.call_count == 1 - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2" @@ -337,7 +341,7 @@ async def test_rpc_update( }, ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_IN_PROGRESS] is True assert state.attributes[ATTR_UPDATE_PERCENTAGE] == 0 @@ -357,7 +361,7 @@ async def test_rpc_update( }, ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_IN_PROGRESS] is True assert state.attributes[ATTR_UPDATE_PERCENTAGE] == 50 @@ -378,15 +382,14 @@ async def test_rpc_update( monkeypatch.setitem(mock_rpc_device.shelly, "ver", "2") 
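From test_block_update_connection_error onward, the update tests drop the `as excinfo` plus substring check in favour of pytest.raises(..., match=...), which searches the given regex against the stringified exception. A standalone illustration of the same assertion style; the exception class and message below are stand-ins, not Home Assistant's:

import pytest


class DeviceActionError(Exception):
    """Illustrative stand-in for HomeAssistantError."""


def trigger_ota_update() -> None:
    raise DeviceActionError(
        "Device communication error occurred while triggering OTA update for Test name"
    )


def test_ota_error_message() -> None:
    # match= is applied with re.search against str(exc), so a distinctive
    # substring is enough; no `as excinfo` bookkeeping is needed.
    with pytest.raises(DeviceActionError, match="triggering OTA update for Test name"):
        trigger_ota_update()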
mock_rpc_device.mock_update() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF assert state.attributes[ATTR_INSTALLED_VERSION] == "2" assert state.attributes[ATTR_LATEST_VERSION] == "2" assert state.attributes[ATTR_IN_PROGRESS] is False assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-sys-fwupdate" @@ -417,7 +420,7 @@ async def test_rpc_sleeping_update( mock_rpc_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2" @@ -429,7 +432,7 @@ async def test_rpc_sleeping_update( monkeypatch.setitem(mock_rpc_device.shelly, "ver", "2") mock_rpc_device.mock_update() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF assert state.attributes[ATTR_INSTALLED_VERSION] == "2" assert state.attributes[ATTR_LATEST_VERSION] == "2" @@ -437,8 +440,7 @@ async def test_rpc_sleeping_update( assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None assert state.attributes[ATTR_SUPPORTED_FEATURES] == UpdateEntityFeature(0) - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-sys-fwupdate" @@ -469,7 +471,7 @@ async def test_rpc_restored_sleeping_update( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2" @@ -486,7 +488,7 @@ async def test_rpc_restored_sleeping_update( mock_rpc_device.mock_update() await hass.async_block_till_done() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF assert state.attributes[ATTR_INSTALLED_VERSION] == "2" assert state.attributes[ATTR_LATEST_VERSION] == "2" @@ -525,7 +527,7 @@ async def test_rpc_restored_sleeping_update_no_last_state( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_UNKNOWN # Make device online @@ -537,7 +539,7 @@ async def test_rpc_restored_sleeping_update_no_last_state( mock_rpc_device.mock_update() await hass.async_block_till_done() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2" @@ -567,7 +569,7 @@ async def test_rpc_beta_update( ) await init_integration(hass, 2) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "1" @@ -584,7 +586,7 @@ async def test_rpc_beta_update( ) await mock_rest_update(hass, freezer) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert 
state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2b" @@ -614,7 +616,7 @@ async def test_rpc_beta_update( assert mock_rpc_device.trigger_ota_update.call_count == 1 - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2b" @@ -637,7 +639,7 @@ async def test_rpc_beta_update( }, ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_IN_PROGRESS] is True assert state.attributes[ATTR_UPDATE_PERCENTAGE] == 40 @@ -658,23 +660,28 @@ async def test_rpc_beta_update( monkeypatch.setitem(mock_rpc_device.shelly, "ver", "2b") await mock_rest_update(hass, freezer) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF assert state.attributes[ATTR_INSTALLED_VERSION] == "2b" assert state.attributes[ATTR_LATEST_VERSION] == "2b" assert state.attributes[ATTR_IN_PROGRESS] is False assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-sys-fwupdate_beta" @pytest.mark.parametrize( ("exc", "error"), [ - (DeviceConnectionError, "OTA update connection error: DeviceConnectionError()"), - (RpcCallError(-1, "error"), "OTA update request error"), + ( + DeviceConnectionError, + "Device communication error occurred while triggering OTA update for Test name", + ), + ( + RpcCallError(-1, "error"), + "RPC call error occurred while triggering OTA update for Test name", + ), ], ) @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -701,14 +708,13 @@ async def test_rpc_update_errors( ) await init_integration(hass, 2) - with pytest.raises(HomeAssistantError) as excinfo: + with pytest.raises(HomeAssistantError, match=error): await hass.services.async_call( UPDATE_DOMAIN, SERVICE_INSTALL, {ATTR_ENTITY_ID: "update.test_name_firmware"}, blocking=True, ) - assert error in str(excinfo.value) @pytest.mark.usefixtures("entity_registry_enabled_by_default") diff --git a/tests/components/shelly/test_valve.py b/tests/components/shelly/test_valve.py index 9dc8597120a..7bf9e3b5f1a 100644 --- a/tests/components/shelly/test_valve.py +++ b/tests/components/shelly/test_valve.py @@ -25,11 +25,11 @@ async def test_block_device_gas_valve( await init_integration(hass, 1, MODEL_GAS) entity_id = "valve.test_name_valve" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-valve_0-valve" - assert hass.states.get(entity_id).state == ValveState.CLOSED + assert (state := hass.states.get(entity_id)) + assert state.state == ValveState.CLOSED await hass.services.async_call( VALVE_DOMAIN, @@ -38,16 +38,14 @@ async def test_block_device_gas_valve( blocking=True, ) - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == ValveState.OPENING monkeypatch.setattr(mock_block_device.blocks[GAS_VALVE_BLOCK_ID], "valve", "opened") mock_block_device.mock_update() await hass.async_block_till_done() - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == ValveState.OPEN await hass.services.async_call( @@ -57,14 +55,12 @@ async def 
test_block_device_gas_valve( blocking=True, ) - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == ValveState.CLOSING monkeypatch.setattr(mock_block_device.blocks[GAS_VALVE_BLOCK_ID], "valve", "closed") mock_block_device.mock_update() await hass.async_block_till_done() - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == ValveState.CLOSED diff --git a/tests/components/smartthings/__init__.py b/tests/components/smartthings/__init__.py index e87d1a8bcdf..fce344b57a7 100644 --- a/tests/components/smartthings/__init__.py +++ b/tests/components/smartthings/__init__.py @@ -3,7 +3,7 @@ from typing import Any from unittest.mock import AsyncMock -from pysmartthings.models import Attribute, Capability, DeviceEvent +from pysmartthings import Attribute, Capability, DeviceEvent from syrupy import SnapshotAssertion from homeassistant.components.smartthings.const import MAIN @@ -55,6 +55,7 @@ async def trigger_update( attribute: Attribute, value: str | float | dict[str, Any] | list[Any] | None, data: dict[str, Any] | None = None, + component: str = MAIN, ) -> None: """Trigger an update.""" event = DeviceEvent( @@ -62,7 +63,7 @@ async def trigger_update( "abc", "abc", device_id, - MAIN, + component, capability, attribute, value, diff --git a/tests/components/smartthings/conftest.py b/tests/components/smartthings/conftest.py index 7f27d3eecc4..ef6b6f29011 100644 --- a/tests/components/smartthings/conftest.py +++ b/tests/components/smartthings/conftest.py @@ -4,12 +4,13 @@ from collections.abc import Generator import time from unittest.mock import AsyncMock, patch -from pysmartthings.models import ( +from pysmartthings import ( DeviceResponse, DeviceStatus, LocationResponse, RoomResponse, SceneResponse, + Subscription, ) import pytest @@ -82,11 +83,15 @@ def mock_smartthings() -> Generator[AsyncMock]: client.get_rooms.return_value = RoomResponse.from_json( load_fixture("rooms.json", DOMAIN) ).items + client.create_subscription.return_value = Subscription.from_json( + load_fixture("subscription.json", DOMAIN) + ) yield client @pytest.fixture( params=[ + "da_ac_airsensor_01001", "da_ac_rac_000001", "da_ac_rac_100001", "da_ac_rac_01001", @@ -101,13 +106,20 @@ def mock_smartthings() -> Generator[AsyncMock]: "centralite", "da_ref_normal_000001", "vd_network_audio_002s", + "vd_sensor_light_2023", "iphone", + "da_sac_ehs_000001_sub", "da_wm_dw_000001", "da_wm_wd_000001", + "da_wm_wd_000001_1", "da_wm_wm_000001", "da_wm_wm_000001_1", + "da_wm_sc_000001", "da_rvc_normal_000001", "da_ks_microwave_0101x", + "da_ks_cooktop_31001", + "da_ks_range_0101x", + "da_ks_oven_01061", "hue_color_temperature_bulb", "hue_rgbw_color_bulb", "c2c_shade", @@ -119,12 +131,19 @@ def mock_smartthings() -> Generator[AsyncMock]: "sensibo_airconditioner_1", "ecobee_sensor", "ecobee_thermostat", + "ecobee_thermostat_offline", "fake_fan", "generic_fan_3_speed", "heatit_ztrm3_thermostat", + "heatit_zpushwall", "generic_ef00_v1", "bosch_radiator_thermostat_ii", "im_speaker_ai_0001", + "abl_light_b_001", + "tplink_p110", + "ikea_kadrilj", + "aux_ac", + "hw_q80r_soundbar", ] ) def device_fixture( @@ -167,6 +186,7 @@ def mock_config_entry(expires_at: int) -> MockConfigEntry: CONF_INSTALLED_APP_ID: "123", }, version=3, + minor_version=2, ) diff --git a/tests/components/smartthings/fixtures/device_status/abl_light_b_001.json b/tests/components/smartthings/fixtures/device_status/abl_light_b_001.json new file mode 
100644 index 00000000000..6dba85d7dc4 --- /dev/null +++ b/tests/components/smartthings/fixtures/device_status/abl_light_b_001.json @@ -0,0 +1,27 @@ +{ + "components": { + "main": { + "switchLevel": { + "levelRange": { + "value": null + }, + "level": { + "value": null + } + }, + "switch": { + "switch": { + "value": null + } + }, + "colorTemperature": { + "colorTemperatureRange": { + "value": null + }, + "colorTemperature": { + "value": null + } + } + } + } +} diff --git a/tests/components/smartthings/fixtures/device_status/aux_ac.json b/tests/components/smartthings/fixtures/device_status/aux_ac.json new file mode 100644 index 00000000000..a3ebede7a10 --- /dev/null +++ b/tests/components/smartthings/fixtures/device_status/aux_ac.json @@ -0,0 +1,69 @@ +{ + "components": { + "main": { + "partyvoice23922.vtempset": { + "vtemp": { + "value": 20, + "unit": "C", + "timestamp": "2024-12-05T20:03:33.161Z" + } + }, + "airConditionerFanMode": { + "fanMode": { + "value": "auto", + "timestamp": "2024-12-05T20:03:32.930Z" + }, + "supportedAcFanModes": { + "value": null + }, + "availableAcFanModes": { + "value": null + } + }, + "temperatureMeasurement": { + "temperatureRange": { + "value": null + }, + "temperature": { + "value": 20.0, + "unit": "C", + "timestamp": "2024-12-05T20:03:33.066Z" + } + }, + "airConditionerMode": { + "availableAcModes": { + "value": null + }, + "supportedAcModes": { + "value": null + }, + "airConditionerMode": { + "value": "cool", + "timestamp": "2024-12-05T20:03:32.845Z" + } + }, + "fanSpeed": { + "fanSpeed": { + "value": 0, + "timestamp": "2024-12-05T20:03:33.334Z" + } + }, + "thermostatCoolingSetpoint": { + "coolingSetpointRange": { + "value": null + }, + "coolingSetpoint": { + "value": 20.0, + "unit": "C", + "timestamp": "2024-12-05T20:03:33.243Z" + } + }, + "switch": { + "switch": { + "value": "off", + "timestamp": "2024-12-05T20:03:32.662Z" + } + } + } + } +} diff --git a/tests/components/smartthings/fixtures/device_status/contact_sensor.json b/tests/components/smartthings/fixtures/device_status/contact_sensor.json index fa158d41b39..ca8c2628c99 100644 --- a/tests/components/smartthings/fixtures/device_status/contact_sensor.json +++ b/tests/components/smartthings/fixtures/device_status/contact_sensor.json @@ -36,7 +36,7 @@ "value": null }, "availableVersion": { - "value": "00000103", + "value": "00000104", "timestamp": "2025-02-09T13:59:19.101Z" }, "lastUpdateStatus": { diff --git a/tests/components/smartthings/fixtures/device_status/da_ac_airsensor_01001.json b/tests/components/smartthings/fixtures/device_status/da_ac_airsensor_01001.json new file mode 100644 index 00000000000..903b5163335 --- /dev/null +++ b/tests/components/smartthings/fixtures/device_status/da_ac_airsensor_01001.json @@ -0,0 +1,362 @@ +{ + "components": { + "main": { + "samsungce.rechargeableBattery": { + "chargingStatus": { + "value": "charging", + "timestamp": "2025-02-18T05:20:27.966Z" + }, + "battery": { + "value": 100, + "unit": "%", + "timestamp": "2025-02-22T04:50:19.633Z" + }, + "resolution": { + "value": 1, + "timestamp": "2024-12-20T14:38:31.662Z" + } + }, + "relativeHumidityMeasurement": { + "humidity": { + "value": 54, + "unit": "%", + "timestamp": "2025-03-21T07:26:16.872Z" + } + }, + "refresh": {}, + "carbonDioxideHealthConcern": { + "carbonDioxideHealthConcern": { + "value": "moderate", + "timestamp": "2025-03-21T13:40:56.560Z" + } + }, + "execute": { + "data": { + "value": { + "payload": { + "rt": ["x.com.samsung.da.sensors"], + "if": ["oic.if.baseline", "oic.if.s"], + 
"x.com.samsung.da.cleanLevel": "2", + "x.com.samsung.da.refresh": "Off", + "x.com.samsung.da.lastSensingTime": "1740829045", + "x.com.samsung.da.items": [ + { + "x.com.samsung.da.id": "0", + "x.com.samsung.da.description": "Sensor for CleanLevel", + "x.com.samsung.da.type": "CleanLevel", + "x.com.samsung.da.value": ["2"] + }, + { + "x.com.samsung.da.id": "1", + "x.com.samsung.da.description": "Sensor for Odor", + "x.com.samsung.da.type": "Odor", + "x.com.samsung.da.value": ["2"] + }, + { + "x.com.samsung.da.id": "2", + "x.com.samsung.da.description": "Sensor for Dust", + "x.com.samsung.da.type": "Dust", + "x.com.samsung.da.value": ["29", "1"] + }, + { + "x.com.samsung.da.id": "3", + "x.com.samsung.da.description": "Sensor for FineDust", + "x.com.samsung.da.type": "FineDust", + "x.com.samsung.da.value": ["7", "1"] + }, + { + "x.com.samsung.da.id": "4", + "x.com.samsung.da.description": "Sensor for SuperFineDust", + "x.com.samsung.da.type": "SuperFineDust", + "x.com.samsung.da.value": ["6", "1"] + }, + { + "x.com.samsung.da.id": "5", + "x.com.samsung.da.description": "Sensor for CO2", + "x.com.samsung.da.type": "CO2", + "x.com.samsung.da.value": ["2527", "3"] + } + ] + } + }, + "data": { + "href": "/sensors/vs/0" + }, + "timestamp": "2025-03-01T11:37:26.334Z" + } + }, + "carbonDioxideMeasurement": { + "carbonDioxide": { + "value": 1045, + "unit": "ppm", + "timestamp": "2025-03-21T15:05:44.312Z" + } + }, + "samsungce.deviceIdentification": { + "micomAssayCode": { + "value": null + }, + "modelName": { + "value": null + }, + "serialNumber": { + "value": null + }, + "serialNumberExtra": { + "value": null + }, + "modelClassificationCode": { + "value": null + }, + "description": { + "value": null + }, + "releaseYear": { + "value": null + }, + "binaryId": { + "value": "ASM-KR-TP1-22-ACMB1M", + "timestamp": "2025-03-20T23:08:07.388Z" + } + }, + "airQualitySensor": { + "airQuality": { + "value": 2, + "unit": "CAQI", + "timestamp": "2025-03-21T15:06:39.609Z" + } + }, + "fineDustHealthConcern": { + "fineDustHealthConcern": { + "value": "good", + "timestamp": "2025-03-21T10:25:04.548Z" + } + }, + "ocf": { + "st": { + "value": null + }, + "mndt": { + "value": null + }, + "mnfv": { + "value": "ASM-KR-TP1-22-ACMB1M_16240426", + "timestamp": "2024-08-19T07:28:01.277Z" + }, + "mnhw": { + "value": "Realtek", + "timestamp": "2024-08-19T07:28:01.277Z" + }, + "di": { + "value": "a3a970ea-e09c-9c04-161b-94c934e21666", + "timestamp": "2024-08-19T07:28:01.277Z" + }, + "mnsl": { + "value": "http://www.samsung.com", + "timestamp": "2024-08-19T07:28:01.277Z" + }, + "dmv": { + "value": "1.2.1", + "timestamp": "2024-08-19T07:28:01.277Z" + }, + "n": { + "value": "Samsung AirMonitor", + "timestamp": "2024-08-19T07:28:01.277Z" + }, + "mnmo": { + "value": "ASM-KR-TP1-22-ACMB1M|10243041|75000000001611C40800020000080000", + "timestamp": "2024-08-19T07:28:01.277Z" + }, + "vid": { + "value": "DA-AC-AIRSENSOR-01001", + "timestamp": "2024-08-19T07:28:01.277Z" + }, + "mnmn": { + "value": "Samsung Electronics", + "timestamp": "2024-08-19T07:28:01.277Z" + }, + "mnml": { + "value": "http://www.samsung.com", + "timestamp": "2024-08-19T07:28:01.277Z" + }, + "mnpv": { + "value": "DAWIT 2.0", + "timestamp": "2024-08-19T07:28:01.277Z" + }, + "mnos": { + "value": "TizenRT 4.0", + "timestamp": "2024-08-19T07:28:01.277Z" + }, + "pi": { + "value": "a3a970ea-e09c-9c04-161b-94c934e21666", + "timestamp": "2024-08-19T07:28:01.277Z" + }, + "icv": { + "value": "core.1.1.0", + "timestamp": "2024-08-19T07:28:01.277Z" + } + }, + "odorSensor": { + 
"odorLevel": { + "value": 1, + "timestamp": "2025-03-21T13:29:15.650Z" + } + }, + "veryFineDustHealthConcern": { + "veryFineDustHealthConcern": { + "value": "good", + "timestamp": "2025-03-21T02:56:21.007Z" + } + }, + "samsungce.doNotDisturb": { + "settable": { + "value": true, + "timestamp": "2024-12-20T14:38:31.895Z" + }, + "dayOfWeek": { + "value": null + }, + "repeatMode": { + "value": null + }, + "startTime": { + "value": "14:00:00Z", + "timestamp": "2024-12-20T14:38:31.895Z" + }, + "endTime": { + "value": "22:00:00Z", + "timestamp": "2024-12-20T14:38:31.895Z" + }, + "activated": { + "value": false, + "timestamp": "2024-12-20T14:38:31.895Z" + } + }, + "custom.disabledCapabilities": { + "disabledCapabilities": { + "value": [], + "timestamp": "2025-03-01T11:37:26.334Z" + } + }, + "samsungce.driverVersion": { + "versionNumber": { + "value": 22100101, + "timestamp": "2023-12-09T04:05:59.505Z" + } + }, + "samsungce.softwareUpdate": { + "targetModule": { + "value": null + }, + "otnDUID": { + "value": "EXCHUODPSCTZY", + "timestamp": "2024-12-20T14:38:31.716Z" + }, + "lastUpdatedDate": { + "value": null + }, + "availableModules": { + "value": [], + "timestamp": "2024-12-20T14:38:31.716Z" + }, + "newVersionAvailable": { + "value": false, + "timestamp": "2024-12-20T14:38:31.716Z" + }, + "operatingState": { + "value": null + }, + "progress": { + "value": null + } + }, + "sec.diagnosticsInformation": { + "logType": { + "value": ["errCode", "dump"], + "timestamp": "2024-12-20T14:38:31.716Z" + }, + "endpoint": { + "value": "SSM", + "timestamp": "2024-12-20T14:38:31.716Z" + }, + "minVersion": { + "value": "1.0", + "timestamp": "2024-12-20T14:38:31.716Z" + }, + "signinPermission": { + "value": null + }, + "setupId": { + "value": "AM0", + "timestamp": "2024-12-20T14:38:31.716Z" + }, + "protocolType": { + "value": "wifi_https", + "timestamp": "2024-12-20T14:38:31.716Z" + }, + "tsId": { + "value": null + }, + "mnId": { + "value": "0AJT", + "timestamp": "2024-12-20T14:38:31.716Z" + }, + "dumpType": { + "value": "file", + "timestamp": "2024-12-20T14:38:31.716Z" + } + }, + "temperatureMeasurement": { + "temperatureRange": { + "value": null + }, + "temperature": { + "value": 23.0, + "unit": "C", + "timestamp": "2025-03-21T04:40:33.951Z" + } + }, + "dustSensor": { + "dustLevel": { + "value": 31, + "unit": "\u03bcg/m^3", + "timestamp": "2025-03-21T15:06:39.609Z" + }, + "fineDustLevel": { + "value": 7, + "unit": "\u03bcg/m^3", + "timestamp": "2025-03-21T15:06:28.515Z" + } + }, + "veryFineDustSensor": { + "veryFineDustLevel": { + "value": 6, + "unit": "\u03bcg/m^3", + "timestamp": "2025-03-21T15:06:28.515Z" + } + }, + "custom.deviceReportStateConfiguration": { + "reportStateRealtimePeriod": { + "value": "enabled", + "timestamp": "2024-12-20T14:38:31.769Z" + }, + "reportStateRealtime": { + "value": { + "state": "disabled" + }, + "timestamp": "2025-03-20T22:02:48.215Z" + }, + "reportStatePeriod": { + "value": "enabled", + "timestamp": "2024-12-20T14:38:31.769Z" + } + }, + "dustHealthConcern": { + "dustHealthConcern": { + "value": "moderate", + "timestamp": "2025-03-21T15:06:39.609Z" + } + } + } + } +} diff --git a/tests/components/smartthings/fixtures/device_status/da_ks_cooktop_31001.json b/tests/components/smartthings/fixtures/device_status/da_ks_cooktop_31001.json new file mode 100644 index 00000000000..5ca8f56fbbf --- /dev/null +++ b/tests/components/smartthings/fixtures/device_status/da_ks_cooktop_31001.json @@ -0,0 +1,508 @@ +{ + "components": { + "burner-02": { + "samsungce.surfaceResidualHeat": { + 
"surfaceResidualHeat": { + "value": "normal", + "timestamp": "2025-03-25T18:18:28.550Z" + } + }, + "samsungce.cooktopHeatingPower": { + "manualLevel": { + "value": 0, + "timestamp": "2025-03-26T05:57:23.203Z" + }, + "heatingMode": { + "value": "manual", + "timestamp": "2025-03-25T18:18:28.550Z" + }, + "manualLevelMin": { + "value": 0, + "timestamp": "2025-03-25T18:18:28.550Z" + }, + "supportedHeatingModes": { + "value": ["manual", "boost", "keepWarm"], + "timestamp": "2025-03-25T18:18:28.550Z" + }, + "manualLevelMax": { + "value": 15, + "timestamp": "2025-03-25T18:18:28.550Z" + } + }, + "samsungce.countDownTimer": { + "startValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.550Z" + }, + "currentValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.550Z" + }, + "status": { + "value": "idle", + "timestamp": "2025-03-25T18:18:28.550Z" + } + } + }, + "burner-01": { + "samsungce.surfaceResidualHeat": { + "surfaceResidualHeat": { + "value": "normal", + "timestamp": "2025-03-25T18:18:28.518Z" + } + }, + "samsungce.cooktopHeatingPower": { + "manualLevel": { + "value": 0, + "timestamp": "2025-03-26T05:57:23.203Z" + }, + "heatingMode": { + "value": "manual", + "timestamp": "2025-03-25T18:18:28.518Z" + }, + "manualLevelMin": { + "value": 0, + "timestamp": "2025-03-25T18:18:28.518Z" + }, + "supportedHeatingModes": { + "value": ["manual", "boost", "keepWarm"], + "timestamp": "2025-03-25T18:18:28.518Z" + }, + "manualLevelMax": { + "value": 15, + "timestamp": "2025-03-25T18:18:28.518Z" + } + }, + "samsungce.countDownTimer": { + "startValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.518Z" + }, + "currentValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.518Z" + }, + "status": { + "value": "idle", + "timestamp": "2025-03-25T18:18:28.518Z" + } + } + }, + "main": { + "custom.disabledComponents": { + "disabledComponents": { + "value": ["burner-6"], + "timestamp": "2025-03-25T18:18:28.464Z" + } + }, + "custom.userNotification": { + "message": { + "value": null + } + }, + "samsungce.remoteManagementData": { + "reportRawData": { + "value": "AgUBASCgAwAACaEDAAAM4AQAAAAA4QHwAw==", + "timestamp": "2025-03-26T07:27:58.282Z" + }, + "version": { + "value": "CT-31.0001", + "timestamp": "2025-03-25T18:18:28.476Z" + } + }, + "samsungce.deviceIdentification": { + "micomAssayCode": { + "value": "5828", + "timestamp": "2025-03-25T18:18:28.476Z" + }, + "modelName": { + "value": "NZ64B5046GK", + "timestamp": "2025-03-25T18:18:28.476Z" + }, + "serialNumber": { + "value": "B8C878DX900290H", + "timestamp": "2025-03-25T18:18:28.476Z" + }, + "serialNumberExtra": { + "value": "N/A", + "timestamp": "2025-03-25T18:18:28.476Z" + }, + "modelClassificationCode": { + "value": "50000204001611000E00000000000000", + "timestamp": "2025-03-25T18:18:28.476Z" + }, + "description": { + "value": "N/A", + "timestamp": "2025-03-25T18:18:28.476Z" + }, + "releaseYear": { + "value": null + }, + "binaryId": { + "value": "TP2X_DA-KS-COOKTOP-31001", + "timestamp": "2025-03-25T18:18:28.476Z" + } + }, + "switch": { + "switch": { + "value": "off", + "timestamp": "2025-03-26T07:27:58.478Z" + } + }, + "samsungce.errorAndAlarmState": { + "events": { + "value": [], + "timestamp": "2025-03-25T18:18:28.476Z" + } + }, + "samsungce.cooktopFlexZone": { + "flexZones": { + "value": [], + "timestamp": "2025-03-26T05:57:23.671Z" + } + }, + "samsungce.softwareVersion": { + "versions": { + "value": [ + { + "id": "Wifi", + "swType": "Wifi-Application", + "versionNumber": 
"80001A220811", + "description": "Aug 11 2022 08:38:36, Wifi:ws029_030, STDK : 1.7.4)" + }, + { + "id": "Micom", + "swType": "Micom Software", + "versionNumber": "240617", + "description": "Description for this micom version" + } + ], + "timestamp": "2025-03-25T18:18:28.482Z" + } + }, + "healthCheck": { + "checkInterval": { + "value": null + }, + "healthStatus": { + "value": null + }, + "DeviceWatch-Enroll": { + "value": null + }, + "DeviceWatch-DeviceStatus": { + "value": null + } + }, + "custom.cooktopOperatingState": { + "supportedCooktopOperatingState": { + "value": ["ready", "run", "paused"], + "timestamp": "2025-03-26T07:26:39.690Z" + }, + "cooktopOperatingState": { + "value": "ready", + "timestamp": "2025-03-26T07:27:58.652Z" + } + }, + "samsungce.kitchenDeviceIdentification": { + "regionCode": { + "value": "EU", + "timestamp": "2025-03-25T18:18:28.501Z" + }, + "modelCode": { + "value": "OZ8500B/EU2", + "timestamp": "2025-03-25T18:18:28.501Z" + }, + "fuel": { + "value": null + }, + "type": { + "value": "cooktop", + "timestamp": "2025-03-25T18:18:28.501Z" + }, + "representativeComponent": { + "value": null + } + }, + "samsungce.softwareUpdate": { + "targetModule": { + "value": null + }, + "otnDUID": { + "value": "JHCB2ZD4E2KRY", + "timestamp": "2025-03-25T18:18:28.482Z" + }, + "lastUpdatedDate": { + "value": null + }, + "availableModules": { + "value": [], + "timestamp": "2025-03-25T18:18:28.501Z" + }, + "newVersionAvailable": { + "value": false, + "timestamp": "2025-03-25T18:18:28.501Z" + }, + "operatingState": { + "value": null + }, + "progress": { + "value": null + } + }, + "samsungce.kidsLockControl": { + "lockState": { + "value": "unlocked", + "timestamp": "2025-03-25T18:18:28.476Z" + } + }, + "audioMute": { + "mute": { + "value": "unmuted", + "timestamp": "2025-03-25T18:18:28.464Z" + } + } + }, + "burner-06": { + "samsungce.surfaceResidualHeat": { + "surfaceResidualHeat": { + "value": "normal", + "timestamp": "2025-03-25T18:18:28.591Z" + } + }, + "samsungce.cooktopHeatingPower": { + "manualLevel": { + "value": 0, + "timestamp": "2025-03-25T18:18:28.591Z" + }, + "heatingMode": { + "value": "manual", + "timestamp": "2025-03-25T18:18:28.591Z" + }, + "manualLevelMin": { + "value": 0, + "timestamp": "2025-03-25T18:18:28.591Z" + }, + "supportedHeatingModes": { + "value": ["manual", "boost", "keepWarm"], + "timestamp": "2025-03-25T18:18:28.591Z" + }, + "manualLevelMax": { + "value": 15, + "timestamp": "2025-03-25T18:18:28.591Z" + } + }, + "samsungce.countDownTimer": { + "startValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.591Z" + }, + "currentValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.591Z" + }, + "status": { + "value": null + } + } + }, + "hood": { + "samsungce.connectionState": { + "connectionState": { + "value": "disconnected", + "timestamp": "2025-03-25T18:18:28.650Z" + } + }, + "samsungce.hoodFanSpeed": { + "settableMaxFanSpeed": { + "value": 5, + "timestamp": "2025-03-25T18:18:28.650Z" + }, + "hoodFanSpeed": { + "value": 0, + "timestamp": "2025-03-25T18:18:28.650Z" + }, + "supportedHoodFanSpeed": { + "value": [1, 2, 3, 4, 5], + "timestamp": "2025-03-25T18:18:28.650Z" + }, + "settableMinFanSpeed": { + "value": 0, + "timestamp": "2025-03-25T18:18:28.650Z" + } + }, + "samsungce.countDownTimer": { + "startValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.650Z" + }, + "currentValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.646Z" + }, + "status": { + "value": null + 
} + }, + "switch": { + "switch": { + "value": null + } + }, + "samsungce.lamp": { + "brightnessLevel": { + "value": null + }, + "supportedBrightnessLevel": { + "value": ["off", "mid"], + "timestamp": "2025-03-25T18:18:28.650Z" + } + } + }, + "burner-05": { + "samsungce.surfaceResidualHeat": { + "surfaceResidualHeat": { + "value": "normal", + "timestamp": "2025-03-25T18:18:28.586Z" + } + }, + "samsungce.cooktopHeatingPower": { + "manualLevel": { + "value": 0, + "timestamp": "2025-03-25T18:18:28.586Z" + }, + "heatingMode": { + "value": "manual", + "timestamp": "2025-03-25T18:18:28.586Z" + }, + "manualLevelMin": { + "value": 0, + "timestamp": "2025-03-25T18:18:28.586Z" + }, + "supportedHeatingModes": { + "value": ["manual", "boost", "keepWarm"], + "timestamp": "2025-03-25T18:18:28.586Z" + }, + "manualLevelMax": { + "value": 15, + "timestamp": "2025-03-25T18:18:28.586Z" + } + }, + "samsungce.countDownTimer": { + "startValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.586Z" + }, + "currentValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.591Z" + }, + "status": { + "value": null + } + } + }, + "burner-04": { + "samsungce.surfaceResidualHeat": { + "surfaceResidualHeat": { + "value": "normal", + "timestamp": "2025-03-25T18:18:28.578Z" + } + }, + "samsungce.cooktopHeatingPower": { + "manualLevel": { + "value": 0, + "timestamp": "2025-03-25T18:49:25.153Z" + }, + "heatingMode": { + "value": "manual", + "timestamp": "2025-03-25T18:18:28.578Z" + }, + "manualLevelMin": { + "value": 0, + "timestamp": "2025-03-25T18:18:28.578Z" + }, + "supportedHeatingModes": { + "value": ["manual", "boost", "keepWarm"], + "timestamp": "2025-03-25T18:18:28.578Z" + }, + "manualLevelMax": { + "value": 15, + "timestamp": "2025-03-25T18:18:28.578Z" + } + }, + "samsungce.countDownTimer": { + "startValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.586Z" + }, + "currentValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.586Z" + }, + "status": { + "value": "idle", + "timestamp": "2025-03-25T18:18:28.578Z" + } + } + }, + "burner-03": { + "samsungce.surfaceResidualHeat": { + "surfaceResidualHeat": { + "value": "normal", + "timestamp": "2025-03-25T18:18:28.550Z" + } + }, + "samsungce.cooktopHeatingPower": { + "manualLevel": { + "value": 0, + "timestamp": "2025-03-26T07:27:58.652Z" + }, + "heatingMode": { + "value": "manual", + "timestamp": "2025-03-25T18:18:28.550Z" + }, + "manualLevelMin": { + "value": 0, + "timestamp": "2025-03-25T18:18:28.550Z" + }, + "supportedHeatingModes": { + "value": ["manual", "boost", "keepWarm"], + "timestamp": "2025-03-25T18:18:28.550Z" + }, + "manualLevelMax": { + "value": 15, + "timestamp": "2025-03-25T18:18:28.550Z" + } + }, + "samsungce.countDownTimer": { + "startValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.550Z" + }, + "currentValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.550Z" + }, + "status": { + "value": "idle", + "timestamp": "2025-03-25T18:18:28.550Z" + } + } + } + } +} diff --git a/tests/components/smartthings/fixtures/device_status/da_ks_oven_01061.json b/tests/components/smartthings/fixtures/device_status/da_ks_oven_01061.json new file mode 100644 index 00000000000..b8b403ba908 --- /dev/null +++ b/tests/components/smartthings/fixtures/device_status/da_ks_oven_01061.json @@ -0,0 +1,566 @@ +{ + "components": { + "main": { + "ovenSetpoint": { + "ovenSetpointRange": { + "value": null + }, + "ovenSetpoint": { + "value": 220, + 
"timestamp": "2025-03-15T12:06:07.818Z" + } + }, + "refresh": {}, + "samsungce.doorState": { + "doorState": { + "value": "closed", + "timestamp": "2025-03-15T09:25:35.157Z" + } + }, + "samsungce.microwavePower": { + "supportedPowerLevels": { + "value": null + }, + "powerLevel": { + "value": "0W", + "timestamp": "2025-03-15T12:06:07.803Z" + } + }, + "samsungce.waterReservoir": { + "slotState": { + "value": null + } + }, + "samsungce.kitchenDeviceDefaults": { + "defaultOperationTime": { + "value": null + }, + "defaultOvenMode": { + "value": "Convection", + "timestamp": "2025-03-15T12:06:07.758Z" + }, + "defaultOvenSetpoint": { + "value": null + } + }, + "execute": { + "data": { + "value": null + } + }, + "samsungce.deviceIdentification": { + "micomAssayCode": { + "value": null + }, + "modelName": { + "value": null + }, + "serialNumber": { + "value": null + }, + "serialNumberExtra": { + "value": null + }, + "modelClassificationCode": { + "value": null + }, + "description": { + "value": null + }, + "releaseYear": { + "value": null + }, + "binaryId": { + "value": "TP1X_DA-KS-OVEN-01061", + "timestamp": "2025-03-13T20:35:02.073Z" + } + }, + "samsungce.ovenDrainageRequirement": { + "drainageRequirement": { + "value": null + } + }, + "ocf": { + "st": { + "value": null + }, + "mndt": { + "value": null + }, + "mnfv": { + "value": "AKS-WW-TP1X-21-OVEN_40211229", + "timestamp": "2025-01-08T17:29:14.260Z" + }, + "mnhw": { + "value": "Realtek", + "timestamp": "2025-01-08T17:29:14.260Z" + }, + "di": { + "value": "9447959a-0dfa-6b27-d40d-650da525c53f", + "timestamp": "2025-01-08T17:29:14.260Z" + }, + "mnsl": { + "value": "http://www.samsung.com", + "timestamp": "2025-01-08T17:29:14.260Z" + }, + "dmv": { + "value": "res.1.1.0,sh.1.1.0", + "timestamp": "2025-01-08T17:29:14.260Z" + }, + "n": { + "value": "[oven] Samsung", + "timestamp": "2025-01-08T17:29:14.260Z" + }, + "mnmo": { + "value": "TP1X_DA-KS-OVEN-01061|40457041|50030018001611000A00000000000000", + "timestamp": "2025-01-08T17:29:14.260Z" + }, + "vid": { + "value": "DA-KS-OVEN-01061", + "timestamp": "2025-01-08T17:29:14.260Z" + }, + "mnmn": { + "value": "Samsung Electronics", + "timestamp": "2025-01-08T17:29:14.260Z" + }, + "mnml": { + "value": "http://www.samsung.com", + "timestamp": "2025-01-08T17:29:14.260Z" + }, + "mnpv": { + "value": "DAWIT 3.0", + "timestamp": "2025-01-08T17:29:14.260Z" + }, + "mnos": { + "value": "TizenRT 3.1", + "timestamp": "2025-01-08T17:29:14.260Z" + }, + "pi": { + "value": "9447959a-0dfa-6b27-d40d-650da525c53f", + "timestamp": "2025-01-08T17:29:14.260Z" + }, + "icv": { + "value": "core.1.1.0", + "timestamp": "2025-01-08T17:29:14.260Z" + } + }, + "remoteControlStatus": { + "remoteControlEnabled": { + "value": "true", + "timestamp": "2025-03-15T09:47:55.406Z" + } + }, + "samsungce.kitchenDeviceIdentification": { + "regionCode": { + "value": "EU", + "timestamp": "2025-03-15T12:06:07.758Z" + }, + "modelCode": { + "value": "NQ7000B-/EU7", + "timestamp": "2025-03-15T12:06:07.758Z" + }, + "fuel": { + "value": null + }, + "type": { + "value": "oven", + "timestamp": "2025-01-08T17:29:12.924Z" + }, + "representativeComponent": { + "value": null + } + }, + "samsungce.kitchenModeSpecification": { + "specification": { + "value": { + "single": [ + { + "mode": "NoOperation", + "supportedOperations": [], + "supportedOptions": {} + }, + { + "mode": "Autocook", + "supportedOperations": [], + "supportedOptions": {} + }, + { + "mode": "Convection", + "supportedOperations": ["start", "set"], + "supportedOptions": { + "temperature": { + "C": 
{ + "min": 40, + "max": 230, + "default": 160, + "resolution": 5 + } + }, + "operationTime": { + "min": "00:01:00", + "max": "10:00:00", + "default": "01:00:00", + "resolution": "00:01:00" + } + } + }, + { + "mode": "FanConventional", + "supportedOperations": ["start", "set"], + "supportedOptions": { + "temperature": { + "C": { + "min": 40, + "max": 230, + "default": 180, + "resolution": 5 + } + }, + "operationTime": { + "min": "00:01:00", + "max": "10:00:00", + "default": "01:00:00", + "resolution": "00:01:00" + } + } + }, + { + "mode": "LargeGrill", + "supportedOperations": ["start", "set"], + "supportedOptions": { + "temperature": { + "C": { + "min": 150, + "max": 230, + "default": 220, + "resolution": 5 + } + }, + "operationTime": { + "min": "00:01:00", + "max": "10:00:00", + "default": "01:00:00", + "resolution": "00:01:00" + } + } + }, + { + "mode": "FanGrill", + "supportedOperations": ["start", "set"], + "supportedOptions": { + "temperature": { + "C": { + "min": 40, + "max": 230, + "default": 180, + "resolution": 5 + } + }, + "operationTime": { + "min": "00:01:00", + "max": "10:00:00", + "default": "01:00:00", + "resolution": "00:01:00" + } + } + }, + { + "mode": "MicroWaveGrill", + "supportedOperations": ["set"], + "supportedOptions": { + "temperature": { + "C": { + "min": 40, + "max": 200, + "default": 200, + "resolution": 5 + } + }, + "operationTime": { + "min": "00:00:10", + "max": "01:30:00", + "default": "00:00:30", + "resolution": "00:00:10" + }, + "powerLevel": { + "default": "300W", + "supportedValues": ["100W", "180W", "300W", "450W", "600W"] + } + } + }, + { + "mode": "MicroWaveConvection", + "supportedOperations": ["set"], + "supportedOptions": { + "temperature": { + "C": { + "min": 40, + "max": 200, + "default": 180, + "resolution": 5 + } + }, + "operationTime": { + "min": "00:00:10", + "max": "01:30:00", + "default": "00:00:30", + "resolution": "00:00:10" + }, + "powerLevel": { + "default": "300W", + "supportedValues": ["100W", "180W", "300W", "450W", "600W"] + } + } + }, + { + "mode": "AirFryer", + "supportedOperations": ["start", "set"], + "supportedOptions": { + "temperature": { + "C": { + "min": 150, + "max": 230, + "default": 220, + "resolution": 5 + } + }, + "operationTime": { + "min": "00:01:00", + "max": "10:00:00", + "default": "01:00:00", + "resolution": "00:01:00" + } + } + }, + { + "mode": "MicroWave", + "supportedOperations": ["set"], + "supportedOptions": { + "operationTime": { + "min": "00:00:10", + "max": "01:30:00", + "default": "00:00:30", + "resolution": "00:00:10" + }, + "powerLevel": { + "default": "800W", + "supportedValues": [ + "100W", + "180W", + "300W", + "450W", + "600W", + "700W", + "800W" + ] + } + } + }, + { + "mode": "Deodorization", + "supportedOperations": ["start", "set"], + "supportedOptions": { + "operationTime": { + "min": "00:00:10", + "max": "00:15:00", + "default": "00:05:00", + "resolution": "00:00:10" + } + } + }, + { + "mode": "KeepWarm", + "supportedOperations": ["start", "set"], + "supportedOptions": { + "temperature": { + "C": { + "min": 60, + "max": 100, + "default": 60, + "resolution": 5 + } + }, + "operationTime": { + "min": "00:01:00", + "max": "10:00:00", + "default": "01:00:00", + "resolution": "00:01:00" + } + } + }, + { + "mode": "SteamClean", + "supportedOperations": ["set"], + "supportedOptions": {} + } + ] + }, + "timestamp": "2025-01-08T17:29:14.757Z" + } + }, + "custom.disabledCapabilities": { + "disabledCapabilities": { + "value": [ + "samsungce.waterReservoir", + "samsungce.ovenDrainageRequirement" + ], + 
"timestamp": "2025-03-15T12:06:07.758Z" + } + }, + "samsungce.definedRecipe": { + "definedRecipe": { + "value": { + "cavityId": "0", + "recipeType": "0", + "categoryId": 0, + "itemId": 0, + "servingSize": 0, + "browingLevel": 0, + "option": 0 + }, + "timestamp": "2025-03-15T12:06:07.803Z" + } + }, + "samsungce.driverVersion": { + "versionNumber": { + "value": 22100101, + "timestamp": "2025-01-08T17:29:12.924Z" + } + }, + "samsungce.softwareUpdate": { + "targetModule": { + "value": null + }, + "otnDUID": { + "value": "43CB2ZD4VUEGW", + "timestamp": "2025-03-13T20:35:02.073Z" + }, + "lastUpdatedDate": { + "value": null + }, + "availableModules": { + "value": [], + "timestamp": "2025-03-13T20:35:02.073Z" + }, + "newVersionAvailable": { + "value": false, + "timestamp": "2025-03-13T20:35:02.073Z" + }, + "operatingState": { + "value": null + }, + "progress": { + "value": null + } + }, + "temperatureMeasurement": { + "temperatureRange": { + "value": null + }, + "temperature": { + "value": 30, + "unit": "C", + "timestamp": "2025-03-15T12:06:32.918Z" + } + }, + "samsungce.ovenOperatingState": { + "completionTime": { + "value": "2025-03-15T12:06:09.550Z", + "timestamp": "2025-03-15T12:06:09.554Z" + }, + "operatingState": { + "value": "running", + "timestamp": "2025-03-15T12:06:07.866Z" + }, + "progress": { + "value": 0, + "timestamp": "2025-03-15T12:06:07.866Z" + }, + "ovenJobState": { + "value": "preheat", + "timestamp": "2025-03-15T12:06:07.803Z" + }, + "operationTime": { + "value": "00:00:00", + "timestamp": "2025-03-15T12:06:07.866Z" + } + }, + "ovenMode": { + "supportedOvenModes": { + "value": ["Others", "Bake", "Broil", "ConvectionBroil", "warming"], + "timestamp": "2025-01-08T17:29:14.757Z" + }, + "ovenMode": { + "value": "Bake", + "timestamp": "2025-03-15T12:06:07.758Z" + } + }, + "ovenOperatingState": { + "completionTime": { + "value": "2025-03-15T12:06:09.550Z", + "timestamp": "2025-03-15T12:06:09.554Z" + }, + "machineState": { + "value": "running", + "timestamp": "2025-03-15T12:06:07.866Z" + }, + "progress": { + "value": 0, + "unit": "%", + "timestamp": "2025-03-15T12:06:07.866Z" + }, + "supportedMachineStates": { + "value": null + }, + "ovenJobState": { + "value": "preheat", + "timestamp": "2025-03-15T12:06:07.803Z" + }, + "operationTime": { + "value": 0, + "timestamp": "2025-03-15T12:06:07.866Z" + } + }, + "samsungce.ovenMode": { + "supportedOvenModes": { + "value": [ + "NoOperation", + "Autocook", + "Convection", + "FanConventional", + "LargeGrill", + "FanGrill", + "MicroWaveGrill", + "MicroWaveConvection", + "AirFryer", + "MicroWave", + "Deodorization", + "KeepWarm", + "SteamClean" + ], + "timestamp": "2025-01-08T17:29:14.757Z" + }, + "ovenMode": { + "value": "Convection", + "timestamp": "2025-03-15T12:06:07.758Z" + } + }, + "samsungce.lamp": { + "brightnessLevel": { + "value": "high", + "timestamp": "2025-03-15T12:06:07.956Z" + }, + "supportedBrightnessLevel": { + "value": ["off", "high"], + "timestamp": "2025-03-15T12:06:07.758Z" + } + }, + "samsungce.kidsLock": { + "lockState": { + "value": "unlocked", + "timestamp": "2025-03-13T20:35:02.170Z" + } + } + } + } +} diff --git a/tests/components/smartthings/fixtures/device_status/da_ks_range_0101x.json b/tests/components/smartthings/fixtures/device_status/da_ks_range_0101x.json new file mode 100644 index 00000000000..6d15aa4696d --- /dev/null +++ b/tests/components/smartthings/fixtures/device_status/da_ks_range_0101x.json @@ -0,0 +1,688 @@ +{ + "components": { + "cavity-01": { + "ovenSetpoint": { + "ovenSetpointRange": { + "value": 
null + }, + "ovenSetpoint": { + "value": 0, + "timestamp": "2022-02-21T22:37:06.976Z" + } + }, + "custom.disabledCapabilities": { + "disabledCapabilities": { + "value": [], + "timestamp": "2022-09-07T22:35:34.197Z" + } + }, + "temperatureMeasurement": { + "temperatureRange": { + "value": null + }, + "temperature": { + "value": 175, + "unit": "F", + "timestamp": "2022-02-21T22:37:06.976Z" + } + }, + "samsungce.ovenOperatingState": { + "completionTime": { + "value": "2024-05-14T19:00:04.579Z", + "timestamp": "2024-05-14T19:00:04.584Z" + }, + "operatingState": { + "value": "ready", + "timestamp": "2022-02-21T22:37:05.415Z" + }, + "progress": { + "value": 1, + "timestamp": "2022-02-21T22:37:05.415Z" + }, + "ovenJobState": { + "value": "ready", + "timestamp": "2022-02-21T22:37:05.415Z" + }, + "operationTime": { + "value": "00:00:00", + "timestamp": "2022-02-21T22:37:05.415Z" + } + }, + "samsungce.kitchenDeviceDefaults": { + "defaultOperationTime": { + "value": null + }, + "defaultOvenMode": { + "value": "ConvectionBake", + "timestamp": "2022-02-21T22:37:06.983Z" + }, + "defaultOvenSetpoint": { + "value": 350, + "timestamp": "2022-02-21T22:37:06.976Z" + } + }, + "custom.ovenCavityStatus": { + "ovenCavityStatus": { + "value": "off", + "timestamp": "2025-03-12T20:38:01.259Z" + } + }, + "ovenMode": { + "supportedOvenModes": { + "value": ["Others"], + "timestamp": "2022-02-21T22:37:08.409Z" + }, + "ovenMode": { + "value": "Others", + "timestamp": "2022-02-21T22:37:06.983Z" + } + }, + "ovenOperatingState": { + "completionTime": { + "value": "2024-05-14T19:00:04.579Z", + "timestamp": "2024-05-14T19:00:04.584Z" + }, + "machineState": { + "value": "ready", + "timestamp": "2022-02-21T22:37:05.415Z" + }, + "progress": { + "value": 1, + "unit": "%", + "timestamp": "2022-02-21T22:37:05.415Z" + }, + "supportedMachineStates": { + "value": null + }, + "ovenJobState": { + "value": "ready", + "timestamp": "2022-02-21T22:37:05.415Z" + }, + "operationTime": { + "value": 0, + "timestamp": "2022-02-21T22:37:05.415Z" + } + }, + "samsungce.ovenMode": { + "supportedOvenModes": { + "value": ["SelfClean", "SteamClean", "NoOperation"], + "timestamp": "2022-02-21T22:37:08.409Z" + }, + "ovenMode": { + "value": "NoOperation", + "timestamp": "2022-02-21T22:37:06.983Z" + } + } + }, + "main": { + "ovenSetpoint": { + "ovenSetpointRange": { + "value": null + }, + "ovenSetpoint": { + "value": 425, + "timestamp": "2025-03-13T21:42:23.492Z" + } + }, + "samsungce.meatProbe": { + "temperatureSetpoint": { + "value": 0, + "unit": "F", + "timestamp": "2022-02-21T22:37:02.619Z" + }, + "temperature": { + "value": 0, + "unit": "F", + "timestamp": "2022-02-21T22:37:02.619Z" + }, + "status": { + "value": "disconnected", + "timestamp": "2022-02-21T22:37:02.679Z" + } + }, + "refresh": {}, + "samsungce.doorState": { + "doorState": { + "value": "closed", + "timestamp": "2025-03-12T20:38:01.255Z" + } + }, + "samsungce.kitchenDeviceDefaults": { + "defaultOperationTime": { + "value": 3600, + "timestamp": "2025-03-13T21:23:24.771Z" + }, + "defaultOvenMode": { + "value": "ConvectionBake", + "timestamp": "2025-03-13T21:23:27.659Z" + }, + "defaultOvenSetpoint": { + "value": 350, + "timestamp": "2025-03-13T21:23:27.596Z" + } + }, + "execute": { + "data": { + "value": { + "payload": { + "rt": ["x.com.samsung.da.information"], + "if": ["oic.if.baseline", "oic.if.a"], + "x.com.samsung.da.modelNum": "TP1X_DA-KS-RANGE-0101X|40445041|5001011E03151101020000000000000", + "x.com.samsung.da.description": "TP1X_DA-KS-OVEN-01011", + "x.com.samsung.da.serialNum": 
"0J4D7DARB03393K", + "x.com.samsung.da.otnDUID": "ZPCNQWBWXI47Q", + "x.com.samsung.da.items": [ + { + "x.com.samsung.da.id": "0", + "x.com.samsung.da.description": "Version", + "x.com.samsung.da.type": "Software", + "x.com.samsung.da.number": "02144A221005", + "x.com.samsung.da.newVersionAvailable": "0" + }, + { + "x.com.samsung.da.id": "1", + "x.com.samsung.da.description": "Version", + "x.com.samsung.da.type": "Firmware", + "x.com.samsung.da.number": "20121600,FFFFFFFF", + "x.com.samsung.da.newVersionAvailable": "0" + } + ] + } + }, + "data": { + "href": "/information/vs/0" + }, + "timestamp": "2023-11-28T22:49:09.333Z" + } + }, + "samsungce.deviceIdentification": { + "micomAssayCode": { + "value": null + }, + "modelName": { + "value": null + }, + "serialNumber": { + "value": null + }, + "serialNumberExtra": { + "value": null + }, + "modelClassificationCode": { + "value": null + }, + "description": { + "value": null + }, + "releaseYear": { + "value": null + }, + "binaryId": { + "value": "TP1X_DA-KS-RANGE-0101X", + "timestamp": "2025-03-12T20:40:29.034Z" + } + }, + "ocf": { + "st": { + "value": null + }, + "mndt": { + "value": null + }, + "mnfv": { + "value": "AKS-WW-TP1-20-OVEN-3-CR_40240205", + "timestamp": "2024-05-14T19:00:26.132Z" + }, + "mnhw": { + "value": "Realtek", + "timestamp": "2024-05-14T19:00:26.132Z" + }, + "di": { + "value": "2c3cbaa0-1899-5ddc-7b58-9d657bd48f18", + "timestamp": "2022-02-21T22:37:02.282Z" + }, + "mnsl": { + "value": "http://www.samsung.com", + "timestamp": "2022-02-21T22:37:02.282Z" + }, + "dmv": { + "value": "1.2.1", + "timestamp": "2022-12-19T22:33:09.710Z" + }, + "n": { + "value": "Samsung Range", + "timestamp": "2024-05-14T19:00:26.132Z" + }, + "mnmo": { + "value": "TP1X_DA-KS-RANGE-0101X|40445041|5001011E031511010200000000000000", + "timestamp": "2024-05-14T19:00:26.132Z" + }, + "vid": { + "value": "DA-KS-RANGE-0101X", + "timestamp": "2022-02-21T22:37:02.282Z" + }, + "mnmn": { + "value": "Samsung Electronics", + "timestamp": "2022-02-21T22:37:02.282Z" + }, + "mnml": { + "value": "http://www.samsung.com", + "timestamp": "2022-02-21T22:37:02.282Z" + }, + "mnpv": { + "value": "DAWIT 3.0", + "timestamp": "2024-05-14T19:00:26.132Z" + }, + "mnos": { + "value": "TizenRT 3.1", + "timestamp": "2024-05-14T19:00:26.132Z" + }, + "pi": { + "value": "2c3cbaa0-1899-5ddc-7b58-9d657bd48f18", + "timestamp": "2022-02-21T22:37:02.282Z" + }, + "icv": { + "value": "core.1.1.0", + "timestamp": "2022-02-21T22:37:02.282Z" + } + }, + "remoteControlStatus": { + "remoteControlEnabled": { + "value": "true", + "timestamp": "2025-03-13T21:42:23.615Z" + } + }, + "samsungce.customRecipe": {}, + "samsungce.kitchenDeviceIdentification": { + "regionCode": { + "value": "US", + "timestamp": "2025-03-13T21:23:27.659Z" + }, + "modelCode": { + "value": "NE6516A-/AA0", + "timestamp": "2025-03-13T21:23:27.659Z" + }, + "fuel": { + "value": null + }, + "type": { + "value": "range", + "timestamp": "2022-02-21T22:37:02.487Z" + }, + "representativeComponent": { + "value": null + } + }, + "samsungce.kitchenModeSpecification": { + "specification": { + "value": { + "single": [ + { + "mode": "Bake", + "supportedOperations": ["start", "set"], + "supportedOptions": { + "temperature": { + "C": { + "min": 80, + "max": 285, + "default": 175, + "resolution": 0 + }, + "F": { + "min": 175, + "max": 550, + "default": 350, + "resolution": 0 + } + }, + "operationTime": { + "min": "00:01:00", + "max": "09:59:00", + "default": "01:00:00", + "resolution": "00:01:00" + } + } + }, + { + "mode": "Broil", + 
"supportedOperations": ["set"], + "supportedOptions": { + "temperature": { + "C": { + "min": 61441, + "max": 61442, + "default": 61441, + "supportedValues": [61441, 61442] + }, + "F": { + "min": 61441, + "max": 61442, + "default": 61441, + "supportedValues": [61441, 61442] + } + } + } + }, + { + "mode": "ConvectionBake", + "supportedOperations": ["start", "set"], + "supportedOptions": { + "temperature": { + "C": { + "min": 80, + "max": 285, + "default": 160, + "resolution": 0 + }, + "F": { + "min": 175, + "max": 550, + "default": 325, + "resolution": 0 + } + }, + "operationTime": { + "min": "00:01:00", + "max": "09:59:00", + "default": "01:00:00", + "resolution": "00:01:00" + } + } + }, + { + "mode": "ConvectionRoast", + "supportedOperations": ["start", "set"], + "supportedOptions": { + "temperature": { + "C": { + "min": 80, + "max": 285, + "default": 160, + "resolution": 0 + }, + "F": { + "min": 175, + "max": 550, + "default": 325, + "resolution": 0 + } + }, + "operationTime": { + "min": "00:01:00", + "max": "09:59:00", + "default": "01:00:00", + "resolution": "00:01:00" + } + } + }, + { + "mode": "KeepWarm", + "supportedOperations": ["set"], + "supportedOptions": { + "temperature": { + "C": { + "min": 80, + "max": 80, + "default": 80, + "supportedValues": [80] + }, + "F": { + "min": 175, + "max": 175, + "default": 175, + "supportedValues": [175] + } + } + } + }, + { + "mode": "BreadProof", + "supportedOperations": ["set"], + "supportedOptions": { + "temperature": { + "C": { + "min": 35, + "max": 35, + "default": 35, + "supportedValues": [35] + }, + "F": { + "min": 95, + "max": 95, + "default": 95, + "supportedValues": [95] + } + } + } + }, + { + "mode": "AirFryer", + "supportedOperations": ["start", "set"], + "supportedOptions": { + "temperature": { + "C": { + "min": 175, + "max": 260, + "default": 220, + "resolution": 0 + }, + "F": { + "min": 350, + "max": 500, + "default": 425, + "resolution": 0 + } + }, + "operationTime": { + "min": "00:01:00", + "max": "09:59:00", + "default": "01:00:00", + "resolution": "00:01:00" + } + } + }, + { + "mode": "Dehydrate", + "supportedOperations": ["start", "set"], + "supportedOptions": { + "temperature": { + "C": { + "min": 40, + "max": 105, + "default": 65, + "resolution": 0 + }, + "F": { + "min": 100, + "max": 225, + "default": 150, + "resolution": 0 + } + }, + "operationTime": { + "min": "00:01:00", + "max": "09:59:00", + "default": "01:00:00", + "resolution": "00:01:00" + } + } + }, + { + "mode": "SelfClean", + "supportedOperations": [], + "supportedOptions": {} + }, + { + "mode": "SteamClean", + "supportedOperations": [], + "supportedOptions": {} + } + ] + }, + "timestamp": "2024-05-14T19:00:30.062Z" + } + }, + "custom.cooktopOperatingState": { + "supportedCooktopOperatingState": { + "value": ["run", "ready"], + "timestamp": "2022-02-21T22:37:05.293Z" + }, + "cooktopOperatingState": { + "value": "ready", + "timestamp": "2025-03-12T20:38:01.402Z" + } + }, + "custom.disabledCapabilities": { + "disabledCapabilities": { + "value": [], + "timestamp": "2025-03-13T21:23:27.659Z" + } + }, + "samsungce.driverVersion": { + "versionNumber": { + "value": 22100101, + "timestamp": "2022-11-01T21:37:51.304Z" + } + }, + "samsungce.softwareUpdate": { + "targetModule": { + "value": null + }, + "otnDUID": { + "value": "ZPCNQWBWXI47Q", + "timestamp": "2025-03-12T20:38:01.262Z" + }, + "lastUpdatedDate": { + "value": null + }, + "availableModules": { + "value": [], + "timestamp": "2025-03-12T20:38:01.262Z" + }, + "newVersionAvailable": { + "value": false, + 
"timestamp": "2025-03-12T20:38:01.262Z" + }, + "operatingState": { + "value": null + }, + "progress": { + "value": null + } + }, + "temperatureMeasurement": { + "temperatureRange": { + "value": null + }, + "temperature": { + "value": 425, + "unit": "F", + "timestamp": "2025-03-13T21:46:35.545Z" + } + }, + "samsungce.ovenOperatingState": { + "completionTime": { + "value": "2025-03-14T03:23:28.048Z", + "timestamp": "2025-03-13T22:09:29.052Z" + }, + "operatingState": { + "value": "running", + "timestamp": "2025-03-13T21:23:24.771Z" + }, + "progress": { + "value": 13, + "timestamp": "2025-03-13T22:06:35.591Z" + }, + "ovenJobState": { + "value": "cooking", + "timestamp": "2025-03-13T21:46:34.327Z" + }, + "operationTime": { + "value": "06:00:00", + "timestamp": "2025-03-13T21:23:24.771Z" + } + }, + "ovenMode": { + "supportedOvenModes": { + "value": [ + "Bake", + "Broil", + "ConvectionBake", + "ConvectionRoast", + "warming", + "Others", + "Dehydrate" + ], + "timestamp": "2025-03-12T20:38:01.259Z" + }, + "ovenMode": { + "value": "Bake", + "timestamp": "2025-03-13T21:23:27.659Z" + } + }, + "ovenOperatingState": { + "completionTime": { + "value": "2025-03-14T03:23:28.048Z", + "timestamp": "2025-03-13T22:09:29.052Z" + }, + "machineState": { + "value": "running", + "timestamp": "2025-03-13T21:23:24.771Z" + }, + "progress": { + "value": 13, + "unit": "%", + "timestamp": "2025-03-13T22:06:35.591Z" + }, + "supportedMachineStates": { + "value": null + }, + "ovenJobState": { + "value": "cooking", + "timestamp": "2025-03-13T21:46:34.327Z" + }, + "operationTime": { + "value": 21600, + "timestamp": "2025-03-13T21:23:24.771Z" + } + }, + "samsungce.ovenMode": { + "supportedOvenModes": { + "value": [ + "Bake", + "Broil", + "ConvectionBake", + "ConvectionRoast", + "KeepWarm", + "BreadProof", + "AirFryer", + "Dehydrate", + "SelfClean", + "SteamClean" + ], + "timestamp": "2025-03-12T20:38:01.259Z" + }, + "ovenMode": { + "value": "Bake", + "timestamp": "2025-03-13T21:23:27.659Z" + } + }, + "samsungce.lamp": { + "brightnessLevel": { + "value": "off", + "timestamp": "2025-03-13T21:23:27.659Z" + }, + "supportedBrightnessLevel": { + "value": ["off", "high"], + "timestamp": "2025-03-13T21:23:27.659Z" + } + }, + "samsungce.kidsLock": { + "lockState": { + "value": "unlocked", + "timestamp": "2025-03-12T20:38:01.400Z" + } + } + } + } +} diff --git a/tests/components/smartthings/fixtures/device_status/da_sac_ehs_000001_sub.json b/tests/components/smartthings/fixtures/device_status/da_sac_ehs_000001_sub.json new file mode 100644 index 00000000000..e27c6c3de21 --- /dev/null +++ b/tests/components/smartthings/fixtures/device_status/da_sac_ehs_000001_sub.json @@ -0,0 +1,680 @@ +{ + "components": { + "main": { + "demandResponseLoadControl": { + "drlcStatus": { + "value": { + "drlcType": 1, + "drlcLevel": -1, + "start": "1970-01-01T00:00:00Z", + "duration": 0, + "override": false + }, + "timestamp": "2025-03-09T08:18:06.394Z" + } + }, + "powerConsumptionReport": { + "powerConsumption": { + "value": { + "energy": 8193810.0, + "deltaEnergy": 0, + "power": 2.539, + "powerEnergy": 0.009404173966911105, + "persistedEnergy": 8193810.0, + "energySaved": 0, + "start": "2025-03-09T11:14:44Z", + "end": "2025-03-09T11:14:57Z" + }, + "timestamp": "2025-03-09T11:14:57.338Z" + } + }, + "samsungce.ehsCycleData": { + "outdoor": { + "value": [ + { + "timestamp": "2025-03-09T02:00:29Z", + "data": "0038003870FF3C3B46020218019A00050000" + }, + { + "timestamp": "2025-03-09T02:05:29Z", + "data": "0034003471FF3C3C46020218019A00050000" + }, + { + 
"timestamp": "2025-03-09T02:10:29Z", + "data": "002D002D71FF3D3D460201C9019A00050000" + } + ], + "unit": "C", + "timestamp": "2025-03-09T11:11:30.786Z" + }, + "indoor": { + "value": [ + { + "timestamp": "2025-03-09T02:00:29Z", + "data": "5F055C050505002564000000000000000001FFFF00079440" + }, + { + "timestamp": "2025-03-09T02:05:29Z", + "data": "60055E050505002563000000000000000001FFFF00079445" + }, + { + "timestamp": "2025-03-09T02:10:29Z", + "data": "61055F050505002560000000000000000001FFFF0007944B" + } + ], + "unit": "C", + "timestamp": "2025-03-09T11:11:30.786Z" + } + }, + "custom.outingMode": { + "outingMode": { + "value": "off", + "timestamp": "2025-03-09T08:00:05.571Z" + } + }, + "samsungce.ehsThermostat": { + "connectionState": { + "value": "disconnected", + "timestamp": "2025-03-09T08:00:05.562Z" + } + }, + "refresh": {}, + "custom.thermostatSetpointControl": { + "minimumSetpoint": { + "value": 40, + "unit": "C", + "timestamp": "2025-03-09T08:18:06.394Z" + }, + "maximumSetpoint": { + "value": 55, + "unit": "C", + "timestamp": "2025-03-09T08:18:06.394Z" + } + }, + "airConditionerMode": { + "availableAcModes": { + "value": null + }, + "supportedAcModes": { + "value": ["eco", "std", "force"], + "timestamp": "2025-03-09T08:18:06.394Z" + }, + "airConditionerMode": { + "value": "std", + "timestamp": "2025-03-09T08:00:05.562Z" + } + }, + "samsungce.ehsFsvSettings": { + "fsvSettings": { + "value": [ + { + "id": "1031", + "inUse": true, + "resolution": 1, + "type": "temperature", + "minValue": 37, + "maxValue": 70, + "value": 70, + "isValid": true, + "temperatureUnit": "C" + }, + { + "id": "1032", + "inUse": true, + "resolution": 1, + "type": "temperature", + "minValue": 15, + "maxValue": 37, + "value": 25, + "isValid": true, + "temperatureUnit": "C" + }, + { + "id": "1051", + "inUse": true, + "resolution": 1, + "type": "temperature", + "minValue": 50, + "maxValue": 70, + "value": 55, + "isValid": true, + "temperatureUnit": "C" + }, + { + "id": "1052", + "inUse": true, + "resolution": 1, + "type": "temperature", + "minValue": 30, + "maxValue": 40, + "value": 40, + "isValid": true, + "temperatureUnit": "C" + }, + { + "id": "2011", + "inUse": true, + "resolution": 1, + "type": "temperature", + "minValue": -20, + "maxValue": 5, + "value": -3, + "isValid": true, + "temperatureUnit": "C" + }, + { + "id": "2012", + "inUse": true, + "resolution": 1, + "type": "temperature", + "minValue": 10, + "maxValue": 20, + "value": 15, + "isValid": true, + "temperatureUnit": "C" + }, + { + "id": "2021", + "inUse": true, + "resolution": 1, + "type": "temperature", + "minValue": 17, + "maxValue": 70, + "value": 50, + "isValid": true, + "temperatureUnit": "C" + }, + { + "id": "2022", + "inUse": true, + "resolution": 1, + "type": "temperature", + "minValue": 17, + "maxValue": 70, + "value": 32, + "isValid": true, + "temperatureUnit": "C" + }, + { + "id": "2031", + "inUse": true, + "resolution": 1, + "type": "temperature", + "minValue": 17, + "maxValue": 70, + "value": 50, + "isValid": true, + "temperatureUnit": "C" + }, + { + "id": "2032", + "inUse": true, + "resolution": 1, + "type": "temperature", + "minValue": 17, + "maxValue": 70, + "value": 38, + "isValid": true, + "temperatureUnit": "C" + }, + { + "id": "2091", + "inUse": true, + "resolution": 1, + "type": "etc", + "minValue": 0, + "maxValue": 4, + "value": 0, + "isValid": true + }, + { + "id": "2092", + "inUse": true, + "resolution": 1, + "type": "etc", + "minValue": 0, + "maxValue": 4, + "value": 0, + "isValid": true + }, + { + "id": "2093", + "inUse": 
true, + "resolution": 1, + "type": "etc", + "minValue": 1, + "maxValue": 4, + "value": 4, + "isValid": true + }, + { + "id": "3011", + "inUse": true, + "resolution": 1, + "type": "etc", + "minValue": 0, + "maxValue": 2, + "value": 2, + "isValid": true + }, + { + "id": "3071", + "inUse": true, + "resolution": 1, + "type": "etc", + "minValue": 0, + "maxValue": 1, + "value": 0, + "isValid": true + }, + { + "id": "4011", + "inUse": true, + "resolution": 1, + "type": "etc", + "minValue": 0, + "maxValue": 1, + "value": 0, + "isValid": true + }, + { + "id": "4012", + "inUse": true, + "resolution": 1, + "type": "temperature", + "minValue": -15, + "maxValue": 20, + "value": 0, + "isValid": true, + "temperatureUnit": "C" + }, + { + "id": "4021", + "inUse": true, + "resolution": 1, + "type": "etc", + "minValue": 0, + "maxValue": 2, + "value": 0, + "isValid": true + }, + { + "id": "4042", + "inUse": true, + "resolution": 1, + "type": "temperature", + "minValue": 5, + "maxValue": 15, + "value": 10, + "isValid": true, + "temperatureUnit": "C" + }, + { + "id": "4061", + "inUse": true, + "resolution": 1, + "type": "etc", + "minValue": 0, + "maxValue": 1, + "value": 0, + "isValid": true + } + ], + "timestamp": "2025-03-09T08:18:06.394Z" + } + }, + "execute": { + "data": { + "value": { + "payload": { + "rt": ["x.com.samsung.da.information"], + "if": ["oic.if.a"], + "x.com.samsung.da.modelNum": "SAC_EHS_MONO|220614|61007400001600000400000000000000", + "x.com.samsung.da.description": "EHS", + "x.com.samsung.da.serialNum": "", + "x.com.samsung.da.versionId": "Samsung Electronics", + "x.com.samsung.da.items": [ + { + "x.com.samsung.da.number": "DB91-02102A 2023-01-11", + "x.com.samsung.da.type": "Software", + "x.com.samsung.da.newVersionAvailable": "false", + "x.com.samsung.da.id": "0", + "x.com.samsung.da.description": "Version" + }, + { + "x.com.samsung.da.number": "DB91-02100A 2020-07-10", + "x.com.samsung.da.type": "Firmware", + "x.com.samsung.da.newVersionAvailable": "false", + "x.com.samsung.da.id": "1", + "x.com.samsung.da.description": "Version" + }, + { + "x.com.samsung.da.number": "DB91-02103B 2022-06-14", + "x.com.samsung.da.type": "Firmware", + "x.com.samsung.da.newVersionAvailable": "false", + "x.com.samsung.da.id": "2", + "x.com.samsung.da.description": "" + }, + { + "x.com.samsung.da.number": "DB91-02450A 2022-07-06", + "x.com.samsung.da.type": "Firmware", + "x.com.samsung.da.newVersionAvailable": "false", + "x.com.samsung.da.id": "3", + "x.com.samsung.da.description": "EHS MONO LOWTEMP" + } + ] + } + }, + "data": { + "href": "/information/vs/0" + }, + "timestamp": "2023-08-02T14:32:28.435Z" + } + }, + "samsungce.deviceIdentification": { + "micomAssayCode": { + "value": null + }, + "modelName": { + "value": null + }, + "serialNumber": { + "value": null + }, + "serialNumberExtra": { + "value": null + }, + "modelClassificationCode": { + "value": null + }, + "description": { + "value": null + }, + "releaseYear": { + "value": null + }, + "binaryId": { + "value": "SAC_EHS_MONO", + "timestamp": "2025-03-09T08:18:06.394Z" + } + }, + "samsungce.sacDisplayCondition": { + "switch": { + "value": "enabled", + "timestamp": "2025-03-09T08:00:05.514Z" + } + }, + "switch": { + "switch": { + "value": "off", + "timestamp": "2025-03-09T11:00:27.522Z" + } + }, + "ocf": { + "st": { + "value": "2025-03-06T08:37:35Z", + "timestamp": "2025-03-09T08:18:05.953Z" + }, + "mndt": { + "value": "", + "timestamp": "2025-03-09T08:18:05.953Z" + }, + "mnfv": { + "value": "20240611.1", + "timestamp": "2025-03-09T08:18:05.953Z" + }, 
+ "mnhw": { + "value": "", + "timestamp": "2025-03-09T08:18:05.953Z" + }, + "di": { + "value": "1f98ebd0-ac48-d802-7f62-000001200100", + "timestamp": "2025-03-09T08:18:05.955Z" + }, + "mnsl": { + "value": "", + "timestamp": "2025-03-09T08:18:05.953Z" + }, + "dmv": { + "value": "res.1.1.0,sh.1.1.0", + "timestamp": "2025-03-09T08:18:05.955Z" + }, + "n": { + "value": "Eco Heating System", + "timestamp": "2025-03-09T08:18:05.955Z" + }, + "mnmo": { + "value": "SAC_EHS_MONO|220614|61007400001600000400000000000000", + "timestamp": "2025-03-09T08:18:06.394Z" + }, + "vid": { + "value": "DA-SAC-EHS-000001-SUB", + "timestamp": "2025-03-09T08:18:05.953Z" + }, + "mnmn": { + "value": "Samsung Electronics", + "timestamp": "2025-03-09T08:18:05.953Z" + }, + "mnml": { + "value": "", + "timestamp": "2025-03-09T08:18:05.953Z" + }, + "mnpv": { + "value": "4.0", + "timestamp": "2025-03-09T08:18:05.953Z" + }, + "mnos": { + "value": "Tizen", + "timestamp": "2025-03-09T08:18:05.953Z" + }, + "pi": { + "value": "1f98ebd0-ac48-d802-7f62-000001200100", + "timestamp": "2025-03-09T08:18:05.953Z" + }, + "icv": { + "value": "core.1.1.0", + "timestamp": "2025-03-09T08:18:05.955Z" + } + }, + "remoteControlStatus": { + "remoteControlEnabled": { + "value": "true", + "timestamp": "2025-03-09T08:18:06.394Z" + } + }, + "custom.energyType": { + "energyType": { + "value": "2.0", + "timestamp": "2025-03-09T08:18:06.394Z" + }, + "energySavingSupport": { + "value": false, + "timestamp": "2023-08-02T14:36:25.480Z" + }, + "drMaxDuration": { + "value": null + }, + "energySavingLevel": { + "value": null + }, + "energySavingInfo": { + "value": null + }, + "supportedEnergySavingLevels": { + "value": null + }, + "energySavingOperation": { + "value": null + }, + "notificationTemplateID": { + "value": null + }, + "energySavingOperationSupport": { + "value": null + } + }, + "samsungce.toggleSwitch": { + "switch": { + "value": "off", + "timestamp": "2025-03-09T11:00:22.880Z" + } + }, + "custom.disabledCapabilities": { + "disabledCapabilities": { + "value": ["remoteControlStatus", "demandResponseLoadControl"], + "timestamp": "2025-03-09T08:31:30.641Z" + } + }, + "samsungce.driverVersion": { + "versionNumber": { + "value": 23070101, + "timestamp": "2023-08-02T14:32:26.195Z" + } + }, + "samsungce.softwareUpdate": { + "targetModule": { + "value": null + }, + "otnDUID": { + "value": null + }, + "lastUpdatedDate": { + "value": null + }, + "availableModules": { + "value": [], + "timestamp": "2025-03-09T08:18:06.394Z" + }, + "newVersionAvailable": { + "value": false, + "timestamp": "2025-03-09T08:18:06.394Z" + }, + "operatingState": { + "value": null + }, + "progress": { + "value": null + } + }, + "temperatureMeasurement": { + "temperatureRange": { + "value": null + }, + "temperature": { + "value": 54.3, + "unit": "C", + "timestamp": "2025-03-09T10:43:24.134Z" + } + }, + "custom.deviceReportStateConfiguration": { + "reportStateRealtimePeriod": { + "value": "enabled", + "timestamp": "2024-11-08T01:41:37.280Z" + }, + "reportStateRealtime": { + "value": { + "state": "disabled" + }, + "timestamp": "2025-03-08T12:06:55.069Z" + }, + "reportStatePeriod": { + "value": "enabled", + "timestamp": "2024-11-08T01:41:37.280Z" + } + }, + "samsungce.ehsTemperatureReference": { + "temperatureReference": { + "value": "water", + "timestamp": "2025-03-09T07:15:48.438Z" + } + }, + "thermostatCoolingSetpoint": { + "coolingSetpointRange": { + "value": null + }, + "coolingSetpoint": { + "value": 48, + "unit": "C", + "timestamp": "2025-03-09T10:58:50.857Z" + } + } + }, + 
"INDOOR": { + "samsungce.ehsThermostat": { + "connectionState": { + "value": "disconnected", + "timestamp": "2025-03-09T08:18:06.394Z" + } + }, + "samsungce.toggleSwitch": { + "switch": { + "value": "off", + "timestamp": "2025-03-09T11:14:44.775Z" + } + }, + "temperatureMeasurement": { + "temperatureRange": { + "value": null + }, + "temperature": { + "value": 39.2, + "unit": "C", + "timestamp": "2025-03-09T11:15:49.852Z" + } + }, + "custom.thermostatSetpointControl": { + "minimumSetpoint": { + "value": 25, + "unit": "C", + "timestamp": "2025-03-09T07:06:20.699Z" + }, + "maximumSetpoint": { + "value": 65, + "unit": "C", + "timestamp": "2025-03-09T07:06:20.699Z" + } + }, + "airConditionerMode": { + "availableAcModes": { + "value": null + }, + "supportedAcModes": { + "value": ["auto", "cool", "heat"], + "timestamp": "2025-03-09T08:18:06.394Z" + }, + "airConditionerMode": { + "value": "heat", + "timestamp": "2025-03-09T08:18:06.394Z" + } + }, + "samsungce.ehsTemperatureReference": { + "temperatureReference": { + "value": "water", + "timestamp": "2025-03-09T07:06:20.699Z" + } + }, + "thermostatCoolingSetpoint": { + "coolingSetpointRange": { + "value": null + }, + "coolingSetpoint": { + "value": 25, + "unit": "C", + "timestamp": "2025-03-09T11:14:44.734Z" + } + }, + "samsungce.sacDisplayCondition": { + "switch": { + "value": "enabled", + "timestamp": "2025-03-09T08:18:06.394Z" + } + }, + "switch": { + "switch": { + "value": "off", + "timestamp": "2025-03-09T11:14:57.238Z" + } + } + } + } +} diff --git a/tests/components/smartthings/fixtures/device_status/da_wm_sc_000001.json b/tests/components/smartthings/fixtures/device_status/da_wm_sc_000001.json new file mode 100644 index 00000000000..d52b5186db3 --- /dev/null +++ b/tests/components/smartthings/fixtures/device_status/da_wm_sc_000001.json @@ -0,0 +1,929 @@ +{ + "components": { + "main": { + "samsungce.welcomeMessage": { + "welcomeMessage": { + "value": null + } + }, + "samsungce.deviceIdentification": { + "micomAssayCode": { + "value": "20299141", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "modelName": { + "value": null + }, + "serialNumber": { + "value": null + }, + "serialNumberExtra": { + "value": null + }, + "modelClassificationCode": { + "value": "3801010200151107020100FF00000000", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "description": { + "value": "DA_DF_TP2_20_COMMON_DF8500A/DC92-02995A_0010", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "releaseYear": { + "value": null + }, + "binaryId": { + "value": "DA_DF_TP2_20_COMMON", + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "samsungce.steamClosetCycle": { + "supportedCycles": { + "value": [ + { + "cycle": "22", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6106", + "default": "off", + "options": ["off", "on"] + } + } + }, + { + "cycle": "23", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "32", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "09", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": 
"12", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "0C", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "31", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "0B", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "10", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "0A", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6106", + "default": "off", + "options": ["off", "on"] + } + } + }, + { + "cycle": "14", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "13", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6106", + "default": "off", + "options": ["off", "on"] + } + } + }, + { + "cycle": "16", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "24", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6206", + "default": "on", + "options": ["off", "on"] + } + } + }, + { + "cycle": "25", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6206", + "default": "on", + "options": ["off", "on"] + } + } + }, + { + "cycle": "2F", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6206", + "default": "on", + "options": ["off", "on"] + } + } + }, + { + "cycle": "20", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6204", + "default": "on", + "options": ["on"] + } + } + }, + { + "cycle": "0F", + "supportedOptions": { + "keepFresh": { + "raw": "66F0", + "default": "off", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6204", + "default": "on", + "options": ["on"] + } + } + }, + { + "cycle": "27", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "30", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "15", + "supportedOptions": { + "keepFresh": { + "raw": 
"660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "1A", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "1B", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "1C", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "2D", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "07", + "supportedOptions": { + "keepFresh": { + "raw": "66F0", + "default": "off", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "08", + "supportedOptions": { + "keepFresh": { + "raw": "66F0", + "default": "off", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + } + ], + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "steamClosetCycle": { + "value": "Table_00_Course_22", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "referenceTable": { + "value": { + "id": "Table_00" + }, + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "switch": { + "switch": { + "value": "off", + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "samsungce.quickControl": { + "version": { + "value": null + } + }, + "ocf": { + "st": { + "value": null + }, + "mndt": { + "value": null + }, + "mnfv": { + "value": "DA_DF_TP2_20_COMMON_30230807", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "mnhw": { + "value": "MediaTek", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "di": { + "value": "b93211bf-9d96-bd21-3b2f-964fcc87f5cc", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "mnsl": { + "value": "http://www.samsung.com", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "dmv": { + "value": "res.1.1.0,sh.1.1.0", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "n": { + "value": "[airdresser] Samsung", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "mnmo": { + "value": "DA_DF_TP2_20_COMMON|20299141|3801010200151107020100FF00000000", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "vid": { + "value": "DA-WM-SC-000001", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "mnmn": { + "value": "Samsung Electronics", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "mnml": { + "value": "http://www.samsung.com", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "mnpv": { + "value": "DAWIT 2.0", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "mnos": { + "value": "TizenRT 2.0 + IPv6", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "pi": { + "value": "b93211bf-9d96-bd21-3b2f-964fcc87f5cc", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "icv": { + "value": "core.1.1.0", + "timestamp": "2025-01-14T01:42:53.834Z" + } + }, + "samsungce.steamClosetCyclePreset": { + "maxNumberOfPresets": { + "value": 10, + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "presets": { + "value": { + "F1": {}, + "F2": {}, + "F3": {}, + "F4": {}, + "F5": {}, + "F6": {}, + "F7": {}, + 
"F8": {}, + "F9": {}, + "FA": {} + }, + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "custom.disabledCapabilities": { + "disabledCapabilities": { + "value": [ + "custom.steamClosetWrinklePrevent", + "custom.veryFineDustFilter", + "demandResponseLoadControl", + "sec.wifiConfiguration", + "samsungce.quickControl", + "samsungce.deviceInfoPrivate" + ], + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "samsungce.driverVersion": { + "versionNumber": { + "value": 24110101, + "timestamp": "2024-12-02T07:55:47.237Z" + } + }, + "sec.diagnosticsInformation": { + "logType": { + "value": ["errCode", "dump"], + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "endpoint": { + "value": "SSM", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "minVersion": { + "value": "1.0", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "signinPermission": { + "value": null + }, + "setupId": { + "value": "A00", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "protocolType": { + "value": "wifi_https", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "tsId": { + "value": null + }, + "mnId": { + "value": "0AJT", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "dumpType": { + "value": "file", + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "samsungce.steamClosetKeepFreshMode": { + "operatingState": { + "value": "ready", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "status": { + "value": "off", + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "samsungce.kidsLock": { + "lockState": { + "value": "unlocked", + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "demandResponseLoadControl": { + "drlcStatus": { + "value": null + } + }, + "powerConsumptionReport": { + "powerConsumption": { + "value": { + "energy": 207500, + "deltaEnergy": 0, + "power": 0, + "powerEnergy": 0.0, + "persistedEnergy": 0, + "energySaved": 0, + "start": "2025-02-10T22:51:59Z", + "end": "2025-02-11T08:21:17Z" + }, + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "dryerOperatingState": { + "completionTime": { + "value": "2025-02-11T09:00:17Z", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "machineState": { + "value": "stop", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "supportedMachineStates": { + "value": ["stop", "run", "pause"], + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "dryerJobState": { + "value": "none", + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "refresh": {}, + "samsungce.steamClosetSanitizeMode": { + "status": { + "value": "off", + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "custom.jobBeginningStatus": { + "jobBeginningStatus": { + "value": null + } + }, + "execute": { + "data": { + "value": { + "payload": { + "rt": ["x.com.samsung.da.information"], + "if": ["oic.if.baseline", "oic.if.a"], + "x.com.samsung.da.modelNum": "DA_DF_TP2_20_COMMON|20299141|3801010200151107020100FF00000000", + "x.com.samsung.da.description": "DA_DF_TP2_20_COMMON_DF8500A/DC92-02995A_0010", + "x.com.samsung.da.serialNum": "1EG158TW400002M", + "x.com.samsung.da.otnDUID": "MTCHUODP5V4FA", + "x.com.samsung.da.diagProtocolType": "WIFI_HTTPS", + "x.com.samsung.da.diagLogType": ["errCode", "dump"], + "x.com.samsung.da.diagDumpType": "file", + "x.com.samsung.da.diagEndPoint": "SSM", + "x.com.samsung.da.diagMnid": "0AJT", + "x.com.samsung.da.diagSetupid": "A00", + "x.com.samsung.da.diagMinVersion": "1.0", + "x.com.samsung.da.items": [ + { + "x.com.samsung.da.id": "0", + "x.com.samsung.da.description": "DA_DF_TP2_20_COMMON|20299141|3801010200151107020100FF00000000", + "x.com.samsung.da.type": "Software", + 
"x.com.samsung.da.number": "02673A230807(F821)", + "x.com.samsung.da.newVersionAvailable": "0" + }, + { + "x.com.samsung.da.id": "1", + "x.com.samsung.da.description": "Firmware_1_DB_20299141210618090FFFFF202995412203111604FFFF(015E2029914120299541_30000000)(FileDown:0)(Type:0)", + "x.com.samsung.da.type": "Firmware", + "x.com.samsung.da.number": "21061809,22031116", + "x.com.samsung.da.newVersionAvailable": "0" + }, + { + "x.com.samsung.da.id": "2", + "x.com.samsung.da.description": "Firmware_2_DB_2023564319111852041FFFFFFFFFFFFFFFFFFFFFFFFE(015E20235643FFFFFFFF_30000000)(FileDown:0)(Type:0)", + "x.com.samsung.da.type": "Firmware", + "x.com.samsung.da.number": "19111852,FFFFFFFF" + } + ] + } + }, + "data": { + "href": "/information/vs/0" + }, + "timestamp": "2024-03-06T11:24:05.312Z" + } + }, + "samsungce.steamClosetDelayEnd": { + "remainingTime": { + "value": 0, + "unit": "min", + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "samsungce.steamClosetAutoCycleLink": { + "steamClosetAutoCycleLink": { + "value": "on", + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "sec.wifiConfiguration": { + "autoReconnection": { + "value": null + }, + "minVersion": { + "value": null + }, + "supportedWiFiFreq": { + "value": null + }, + "supportedAuthType": { + "value": null + }, + "protocolType": { + "value": null + } + }, + "custom.steamClosetWrinklePrevent": { + "steamClosetWrinklePrevent": { + "value": "off", + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "remoteControlStatus": { + "remoteControlEnabled": { + "value": "false", + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "custom.supportedOptions": { + "course": { + "value": null + }, + "referenceTable": { + "value": { + "id": "Table_00" + }, + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "supportedCourses": { + "value": [ + "22", + "23", + "32", + "09", + "12", + "0C", + "31", + "0B", + "10", + "0A", + "14", + "13", + "16", + "24", + "25", + "2F", + "20", + "0F", + "27", + "30", + "15", + "1A", + "1B", + "1C", + "2D", + "07", + "08" + ], + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "custom.steamClosetOperatingState": { + "supportedSteamClosetJobState": { + "value": ["none", "steaming", "airwashing", "drying", "finish"], + "timestamp": "2025-02-09T22:16:19.221Z" + }, + "completionTime": { + "value": "2025-02-11T09:00:17Z", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "steamClosetMachineState": { + "value": "stop", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "supportedSteamClosetMachineState": { + "value": ["stop", "run", "pause"], + "timestamp": "2023-06-23T16:00:41.238Z" + }, + "steamClosetJobState": { + "value": "none", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "progress": { + "value": 1, + "unit": "%", + "timestamp": "2025-02-10T22:53:25.928Z" + }, + "remainingTimeStr": { + "value": "00:39", + "timestamp": "2025-02-10T22:53:25.928Z" + }, + "steamClosetDelayEndTime": { + "value": null + }, + "remainingTime": { + "value": 39, + "unit": "min", + "timestamp": "2025-02-10T22:53:25.928Z" + } + }, + "custom.energyType": { + "energyType": { + "value": "2.0", + "timestamp": "2024-03-06T11:24:06.106Z" + }, + "energySavingSupport": { + "value": false, + "timestamp": "2024-03-06T11:24:06.106Z" + }, + "drMaxDuration": { + "value": null + }, + "energySavingLevel": { + "value": null + }, + "energySavingInfo": { + "value": null + }, + "supportedEnergySavingLevels": { + "value": null + }, + "energySavingOperation": { + "value": null + }, + "notificationTemplateID": { + "value": null + }, + 
"energySavingOperationSupport": { + "value": null + } + }, + "samsungce.softwareUpdate": { + "targetModule": { + "value": {}, + "timestamp": "2025-02-09T17:33:28.019Z" + }, + "otnDUID": { + "value": "MTCHUODP5V4FA", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "lastUpdatedDate": { + "value": null + }, + "availableModules": { + "value": [], + "timestamp": "2023-06-23T16:00:41.636Z" + }, + "newVersionAvailable": { + "value": false, + "timestamp": "2025-02-09T17:33:28.019Z" + }, + "operatingState": { + "value": null + }, + "progress": { + "value": null + } + }, + "custom.veryFineDustFilter": { + "veryFineDustFilterStatus": { + "value": null + }, + "veryFineDustFilterResetType": { + "value": null + }, + "veryFineDustFilterUsage": { + "value": null + }, + "veryFineDustFilterLastResetDate": { + "value": null + }, + "veryFineDustFilterUsageStep": { + "value": null + }, + "veryFineDustFilterCapacity": { + "value": null + } + } + } + } +} diff --git a/tests/components/smartthings/fixtures/device_status/da_wm_wd_000001_1.json b/tests/components/smartthings/fixtures/device_status/da_wm_wd_000001_1.json new file mode 100644 index 00000000000..b45bac95237 --- /dev/null +++ b/tests/components/smartthings/fixtures/device_status/da_wm_wd_000001_1.json @@ -0,0 +1,692 @@ +{ + "components": { + "hca.main": { + "hca.dryerMode": { + "mode": { + "value": "normal", + "timestamp": "2025-03-09T16:31:41.247Z" + }, + "supportedModes": { + "value": ["normal", "quickDry", "mix", "timeDry"], + "timestamp": "2025-03-09T16:31:40.486Z" + } + } + }, + "main": { + "custom.dryerWrinklePrevent": { + "operatingState": { + "value": "ready", + "timestamp": "2025-03-09T16:31:40.486Z" + }, + "dryerWrinklePrevent": { + "value": "off", + "timestamp": "2025-03-09T16:31:41.077Z" + } + }, + "samsungce.dryerDryingTemperature": { + "dryingTemperature": { + "value": null, + "timestamp": "2021-04-02T18:31:36.756Z" + }, + "supportedDryingTemperature": { + "value": null, + "timestamp": "2021-04-02T18:29:52.258Z" + } + }, + "samsungce.welcomeMessage": { + "welcomeMessage": { + "value": null, + "timestamp": "2021-04-02T18:32:37.913Z" + } + }, + "samsungce.dongleSoftwareInstallation": { + "status": { + "value": "completed", + "timestamp": "2022-06-17T17:07:35.734Z" + } + }, + "samsungce.dryerCyclePreset": { + "maxNumberOfPresets": { + "value": 10, + "timestamp": "2025-03-09T16:31:41.229Z" + }, + "presets": { + "value": null, + "timestamp": "2021-04-02T18:30:36.772Z" + } + }, + "samsungce.deviceIdentification": { + "micomAssayCode": { + "value": "20221341", + "timestamp": "2025-03-09T16:31:40.834Z" + }, + "modelName": { + "value": null, + "timestamp": "2021-04-02T18:29:53.622Z" + }, + "serialNumber": { + "value": null, + "timestamp": "2021-04-02T18:29:52.641Z" + }, + "serialNumberExtra": { + "value": null, + "timestamp": "2021-04-02T18:29:51.653Z" + }, + "modelClassificationCode": { + "value": "30010102001211000103000000000000", + "timestamp": "2025-03-09T16:31:40.834Z" + }, + "description": { + "value": "DA_WM_A51_20_COMMON_DV6800N/DC92-01967B_0404", + "timestamp": "2025-03-09T16:31:40.834Z" + }, + "releaseYear": { + "value": null + }, + "binaryId": { + "value": "DA_WM_A51_20_COMMON", + "timestamp": "2025-03-09T19:07:40.295Z" + } + }, + "switch": { + "switch": { + "value": "off", + "timestamp": "2025-03-09T19:47:36.549Z" + } + }, + "samsungce.quickControl": { + "version": { + "value": null + } + }, + "samsungce.dryerFreezePrevent": { + "operatingState": { + "value": null + } + }, + "ocf": { + "st": { + "value": null, + "timestamp": 
"2020-06-20T10:01:02.741Z" + }, + "mndt": { + "value": null, + "timestamp": "2020-06-25T01:53:25.278Z" + }, + "mnfv": { + "value": "DA_WM_A51_20_COMMON_30230708", + "timestamp": "2024-12-15T10:53:49.561Z" + }, + "mnhw": { + "value": "ARTIK051", + "timestamp": "2024-12-15T10:53:49.561Z" + }, + "di": { + "value": "3a6c4e05-811d-5041-e956-3d04c424cbcd", + "timestamp": "2024-12-15T10:53:49.561Z" + }, + "mnsl": { + "value": "http://www.samsung.com", + "timestamp": "2024-12-15T10:53:49.561Z" + }, + "dmv": { + "value": "res.1.1.0,sh.1.1.0", + "timestamp": "2024-12-15T10:53:49.561Z" + }, + "n": { + "value": "[dryer] Samsung", + "timestamp": "2024-12-15T10:53:49.561Z" + }, + "mnmo": { + "value": "DA_WM_A51_20_COMMON|20221341|30010102001211000103000000000000", + "timestamp": "2024-12-15T10:53:49.561Z" + }, + "vid": { + "value": "DA-WM-WD-000001", + "timestamp": "2024-12-15T10:53:49.561Z" + }, + "mnmn": { + "value": "Samsung Electronics", + "timestamp": "2024-12-15T10:53:49.561Z" + }, + "mnml": { + "value": "http://www.samsung.com", + "timestamp": "2024-12-15T10:53:49.561Z" + }, + "mnpv": { + "value": "DAWIT 2.0", + "timestamp": "2024-12-15T10:53:49.561Z" + }, + "mnos": { + "value": "TizenRT 1.0 + IPv6", + "timestamp": "2024-12-15T10:53:49.561Z" + }, + "pi": { + "value": "3a6c4e05-811d-5041-e956-3d04c424cbcd", + "timestamp": "2024-12-15T10:53:49.561Z" + }, + "icv": { + "value": "core.1.1.0", + "timestamp": "2024-12-15T10:53:49.561Z" + } + }, + "custom.dryerDryLevel": { + "dryerDryLevel": { + "value": "2", + "timestamp": "2025-03-09T19:47:36.806Z" + }, + "supportedDryerDryLevel": { + "value": ["none", "1", "2", "3"], + "timestamp": "2020-11-18T20:16:43.428Z" + } + }, + "samsungce.dryerAutoCycleLink": { + "dryerAutoCycleLink": { + "value": null, + "timestamp": "2020-08-11T12:41:38.646Z" + } + }, + "samsungce.dryerCycle": { + "dryerCycle": { + "value": "Table_00_Course_9A", + "timestamp": "2025-03-09T16:31:41.247Z" + }, + "supportedCycles": { + "value": [ + { + "cycle": "9A", + "supportedOptions": { + "dryingLevel": { + "raw": "D20E", + "default": "2", + "options": ["1", "2", "3"] + } + } + }, + { + "cycle": "CA", + "supportedOptions": { + "dryingLevel": { + "raw": "D10E", + "default": "1", + "options": ["1", "2", "3"] + } + } + }, + { + "cycle": "DB", + "supportedOptions": { + "dryingLevel": { + "raw": "D204", + "default": "2", + "options": ["2"] + } + } + }, + { + "cycle": "99", + "supportedOptions": { + "dryingLevel": { + "raw": "D20E", + "default": "2", + "options": ["1", "2", "3"] + } + } + }, + { + "cycle": "93", + "supportedOptions": { + "dryingLevel": { + "raw": "D102", + "default": "1", + "options": ["1"] + } + } + }, + { + "cycle": "B5", + "supportedOptions": { + "dryingLevel": { + "raw": "D102", + "default": "1", + "options": ["1"] + } + } + }, + { + "cycle": "D7", + "supportedOptions": { + "dryingLevel": { + "raw": "D204", + "default": "2", + "options": ["2"] + } + } + }, + { + "cycle": "A5", + "supportedOptions": { + "dryingLevel": { + "raw": "D204", + "default": "2", + "options": ["2"] + } + } + }, + { + "cycle": "96", + "supportedOptions": { + "dryingLevel": { + "raw": "D000", + "default": "none", + "options": [] + } + } + }, + { + "cycle": "97", + "supportedOptions": { + "dryingLevel": { + "raw": "D000", + "default": "none", + "options": [] + } + } + }, + { + "cycle": "7F", + "supportedOptions": { + "dryingLevel": { + "raw": "D000", + "default": "none", + "options": [] + } + } + }, + { + "cycle": "98", + "supportedOptions": { + "dryingLevel": { + "raw": "D000", + "default": "none", + 
"options": [] + } + } + }, + { + "cycle": "EB", + "supportedOptions": { + "dryingLevel": { + "raw": "D204", + "default": "2", + "options": ["2"] + } + } + }, + { + "cycle": "B6", + "supportedOptions": { + "dryingLevel": { + "raw": "D20E", + "default": "2", + "options": ["1", "2", "3"] + } + } + } + ], + "timestamp": "2025-02-10T02:24:03.524Z" + }, + "referenceTable": { + "value": { + "id": "Table_00" + }, + "timestamp": "2025-03-09T16:31:41.247Z" + }, + "specializedFunctionClassification": { + "value": 4, + "timestamp": "2025-03-09T16:31:40.486Z" + } + }, + "custom.disabledCapabilities": { + "disabledCapabilities": { + "value": [ + "samsungce.dryerDelayEnd", + "dryerOperatingState", + "samsungce.dryerCyclePreset", + "samsungce.welcomeMessage", + "samsungce.dongleSoftwareInstallation", + "sec.wifiConfiguration", + "samsungce.quickControl", + "samsungce.deviceInfoPrivate", + "demandResponseLoadControl", + "samsungce.dryerFreezePrevent", + "samsungce.dryerDryingTemperature", + "sec.diagnosticsInformation" + ], + "timestamp": "2024-07-02T14:42:38.334Z" + } + }, + "samsungce.driverVersion": { + "versionNumber": { + "value": 24110101, + "timestamp": "2024-12-02T07:43:41.263Z" + } + }, + "sec.diagnosticsInformation": { + "logType": { + "value": null + }, + "endpoint": { + "value": null + }, + "minVersion": { + "value": null + }, + "signinPermission": { + "value": null + }, + "setupId": { + "value": null + }, + "protocolType": { + "value": null + }, + "tsId": { + "value": null + }, + "mnId": { + "value": null + }, + "dumpType": { + "value": null + } + }, + "samsungce.kidsLock": { + "lockState": { + "value": "unlocked", + "timestamp": "2025-03-09T16:31:40.882Z" + } + }, + "demandResponseLoadControl": { + "drlcStatus": { + "value": null + } + }, + "samsungce.detergentOrder": { + "alarmEnabled": { + "value": false, + "timestamp": "2025-03-09T16:31:40.486Z" + }, + "orderThreshold": { + "value": 0, + "unit": "cc", + "timestamp": "2025-03-09T16:31:40.486Z" + } + }, + "powerConsumptionReport": { + "powerConsumption": { + "value": { + "energy": 796400, + "deltaEnergy": 0, + "power": 0, + "powerEnergy": 0.0, + "persistedEnergy": 0, + "energySaved": 0, + "start": "2025-03-09T19:47:26Z", + "end": "2025-03-09T19:47:37Z" + }, + "timestamp": "2025-03-09T19:47:37.283Z" + } + }, + "dryerOperatingState": { + "completionTime": { + "value": "2025-03-09T22:55:37Z", + "timestamp": "2025-03-09T19:47:37.015Z" + }, + "machineState": { + "value": "stop", + "timestamp": "2025-03-09T19:47:37.015Z" + }, + "supportedMachineStates": { + "value": ["stop", "run", "pause"], + "timestamp": "2025-03-09T16:31:41.172Z" + }, + "dryerJobState": { + "value": "none", + "timestamp": "2025-03-09T19:47:37.015Z" + } + }, + "samsungce.detergentState": { + "remainingAmount": { + "value": 0, + "unit": "cc", + "timestamp": "2025-03-09T16:31:40.486Z" + }, + "dosage": { + "value": 0, + "unit": "cc", + "timestamp": "2025-03-09T16:31:40.486Z" + }, + "initialAmount": { + "value": 0, + "unit": "cc", + "timestamp": "2025-03-09T16:31:40.486Z" + }, + "detergentType": { + "value": "none", + "timestamp": "2021-04-02T18:29:51.428Z" + } + }, + "samsungce.dryerDelayEnd": { + "remainingTime": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-09T16:31:41.172Z" + } + }, + "refresh": {}, + "custom.jobBeginningStatus": { + "jobBeginningStatus": { + "value": null, + "timestamp": "2020-06-25T01:53:34.974Z" + } + }, + "execute": { + "data": { + "value": { + "payload": { + "rt": ["x.com.samsung.da.information"], + "if": ["oic.if.baseline", "oic.if.a"], + 
"x.com.samsung.da.modelNum": "DA_WM_A51_20_COMMON|20221341|30010102001211000103000000000000", + "x.com.samsung.da.description": "DA_WM_A51_20_COMMON_DV6800N/DC92-01967B_0404", + "x.com.samsung.da.serialNum": "0T625AEN100200N", + "x.com.samsung.da.otnDUID": "SHCDM6YAPCCXC", + "x.com.samsung.da.items": [ + { + "x.com.samsung.da.id": "0", + "x.com.samsung.da.description": "DA_WM_A51_20_COMMON|20221341|30010102001211000103000000000000", + "x.com.samsung.da.type": "Software", + "x.com.samsung.da.number": "02198A220728(E256)", + "x.com.samsung.da.newVersionAvailable": "0" + }, + { + "x.com.samsung.da.id": "1", + "x.com.samsung.da.description": "DA_WM_A51_20_COMMON", + "x.com.samsung.da.type": "Firmware", + "x.com.samsung.da.number": "17111305,19060420", + "x.com.samsung.da.newVersionAvailable": "0" + } + ] + } + }, + "data": { + "href": "/information/vs/0" + }, + "timestamp": "2023-08-07T00:06:05.984Z" + } + }, + "sec.wifiConfiguration": { + "autoReconnection": { + "value": null + }, + "minVersion": { + "value": null + }, + "supportedWiFiFreq": { + "value": null + }, + "supportedAuthType": { + "value": null + }, + "protocolType": { + "value": null + } + }, + "remoteControlStatus": { + "remoteControlEnabled": { + "value": "false", + "timestamp": "2025-03-09T16:31:41.180Z" + } + }, + "custom.supportedOptions": { + "course": { + "value": null + }, + "referenceTable": { + "value": { + "id": "Table_00" + }, + "timestamp": "2025-03-09T16:31:41.247Z" + }, + "supportedCourses": { + "value": [ + "9A", + "CA", + "DB", + "99", + "93", + "B5", + "D7", + "A5", + "96", + "97", + "7F", + "98", + "EB", + "B6" + ], + "timestamp": "2025-03-09T16:31:40.486Z" + } + }, + "custom.energyType": { + "energyType": { + "value": "2.0", + "timestamp": "2022-06-17T17:07:35.734Z" + }, + "energySavingSupport": { + "value": false, + "timestamp": "2022-06-17T17:07:35.734Z" + }, + "drMaxDuration": { + "value": null + }, + "energySavingLevel": { + "value": null + }, + "energySavingInfo": { + "value": null + }, + "supportedEnergySavingLevels": { + "value": null + }, + "energySavingOperation": { + "value": null + }, + "notificationTemplateID": { + "value": null + }, + "energySavingOperationSupport": { + "value": null + } + }, + "samsungce.dryerOperatingState": { + "operatingState": { + "value": "ready", + "timestamp": "2025-03-09T19:47:37.015Z" + }, + "supportedOperatingStates": { + "value": ["ready", "running", "paused"], + "timestamp": "2022-11-01T12:48:22.390Z" + }, + "scheduledJobs": { + "value": [ + { + "jobName": "drying", + "timeInMin": 192 + }, + { + "jobName": "cooling", + "timeInMin": 1 + } + ], + "timestamp": "2025-03-09T16:31:40.486Z" + }, + "progress": { + "value": 1, + "unit": "%", + "timestamp": "2025-03-09T19:47:37.015Z" + }, + "remainingTimeStr": { + "value": "03:08", + "timestamp": "2025-03-09T19:47:37.015Z" + }, + "dryerJobState": { + "value": "none", + "timestamp": "2025-03-09T19:47:37.015Z" + }, + "remainingTime": { + "value": 188, + "unit": "min", + "timestamp": "2025-03-09T19:47:37.015Z" + } + }, + "samsungce.softwareUpdate": { + "targetModule": { + "value": null + }, + "otnDUID": { + "value": "SHCDM6YAPCCXC", + "timestamp": "2025-03-09T16:31:40.834Z" + }, + "lastUpdatedDate": { + "value": null + }, + "availableModules": { + "value": [], + "timestamp": "2024-12-01T21:16:50.598Z" + }, + "newVersionAvailable": { + "value": false, + "timestamp": "2024-12-01T21:16:50.598Z" + }, + "operatingState": { + "value": null + }, + "progress": { + "value": null + } + }, + "samsungce.dryerDryingTime": { + 
"supportedDryingTime": { + "value": ["0", "30", "60", "90", "120", "150"], + "timestamp": "2021-04-02T18:29:51.428Z" + }, + "dryingTime": { + "value": "0", + "unit": "min", + "timestamp": "2025-03-09T16:31:41.077Z" + } + } + } + } +} diff --git a/tests/components/smartthings/fixtures/device_status/ecobee_thermostat_offline.json b/tests/components/smartthings/fixtures/device_status/ecobee_thermostat_offline.json new file mode 100644 index 00000000000..fdda31783f6 --- /dev/null +++ b/tests/components/smartthings/fixtures/device_status/ecobee_thermostat_offline.json @@ -0,0 +1,81 @@ +{ + "components": { + "main": { + "relativeHumidityMeasurement": { + "humidity": { + "value": null + } + }, + "thermostatOperatingState": { + "thermostatOperatingState": { + "value": null + } + }, + "healthCheck": { + "checkInterval": { + "value": 60, + "unit": "s", + "data": { + "deviceScheme": "UNTRACKED", + "protocol": "cloud" + }, + "timestamp": "2025-03-10T00:57:26.866Z" + }, + "healthStatus": { + "value": null + }, + "DeviceWatch-Enroll": { + "value": null + }, + "DeviceWatch-DeviceStatus": { + "value": "offline", + "data": { + "reason": "DEVICE-OFFLINE" + }, + "timestamp": "2025-03-11T10:22:17.013Z" + } + }, + "temperatureMeasurement": { + "temperatureRange": { + "value": null + }, + "temperature": { + "value": null + } + }, + "thermostatHeatingSetpoint": { + "heatingSetpoint": { + "value": null + }, + "heatingSetpointRange": { + "value": null + } + }, + "thermostatFanMode": { + "thermostatFanMode": { + "value": null + }, + "supportedThermostatFanModes": { + "value": null + } + }, + "refresh": {}, + "thermostatMode": { + "thermostatMode": { + "value": null + }, + "supportedThermostatModes": { + "value": null + } + }, + "thermostatCoolingSetpoint": { + "coolingSetpointRange": { + "value": null + }, + "coolingSetpoint": { + "value": null + } + } + } + } +} diff --git a/tests/components/smartthings/fixtures/device_status/heatit_zpushwall.json b/tests/components/smartthings/fixtures/device_status/heatit_zpushwall.json new file mode 100644 index 00000000000..591d1128ea0 --- /dev/null +++ b/tests/components/smartthings/fixtures/device_status/heatit_zpushwall.json @@ -0,0 +1,116 @@ +{ + "components": { + "button4": { + "button": { + "button": { + "value": "pushed", + "timestamp": "2025-02-10T08:01:11.326Z" + }, + "numberOfButtons": { + "value": 1, + "timestamp": "2023-12-04T16:51:16.695Z" + }, + "supportedButtonValues": { + "value": ["pushed", "held", "down_hold"], + "timestamp": "2023-12-04T16:51:16.717Z" + } + } + }, + "button5": { + "button": { + "button": { + "value": "pushed", + "timestamp": "2025-03-09T16:37:40.792Z" + }, + "numberOfButtons": { + "value": 1, + "timestamp": "2023-12-04T16:51:16.762Z" + }, + "supportedButtonValues": { + "value": ["pushed", "held", "down_hold"], + "timestamp": "2023-12-04T16:51:16.813Z" + } + } + }, + "button2": { + "button": { + "button": { + "value": "pushed", + "timestamp": "2025-02-10T08:00:57.171Z" + }, + "numberOfButtons": { + "value": 1, + "timestamp": "2023-12-04T16:51:16.861Z" + }, + "supportedButtonValues": { + "value": ["pushed", "held", "down_hold"], + "timestamp": "2023-12-04T16:51:16.906Z" + } + } + }, + "button3": { + "button": { + "button": { + "value": "pushed", + "timestamp": "2025-01-30T05:53:00.663Z" + }, + "numberOfButtons": { + "value": 1, + "timestamp": "2023-12-04T16:51:16.852Z" + }, + "supportedButtonValues": { + "value": ["pushed", "held", "down_hold"], + "timestamp": "2023-12-04T16:51:16.848Z" + } + } + }, + "button6": { + "button": { + "button": { + 
"value": "pushed", + "timestamp": "2024-10-02T13:11:07.346Z" + }, + "numberOfButtons": { + "value": 1, + "timestamp": "2023-12-04T16:51:16.816Z" + }, + "supportedButtonValues": { + "value": ["pushed", "held", "down_hold"], + "timestamp": "2023-12-04T16:51:16.848Z" + } + } + }, + "main": { + "refresh": {}, + "battery": { + "quantity": { + "value": null + }, + "battery": { + "value": 100, + "unit": "%", + "timestamp": "2025-03-10T10:32:19.528Z" + }, + "type": { + "value": null + } + } + }, + "button1": { + "button": { + "button": { + "value": "pushed", + "timestamp": "2025-01-30T05:52:46.718Z" + }, + "numberOfButtons": { + "value": 1, + "timestamp": "2023-12-04T16:51:16.717Z" + }, + "supportedButtonValues": { + "value": ["pushed", "held", "down_hold"], + "timestamp": "2023-12-04T16:51:16.767Z" + } + } + } + } +} diff --git a/tests/components/smartthings/fixtures/device_status/hw_q80r_soundbar.json b/tests/components/smartthings/fixtures/device_status/hw_q80r_soundbar.json new file mode 100644 index 00000000000..8cd0d3e35a9 --- /dev/null +++ b/tests/components/smartthings/fixtures/device_status/hw_q80r_soundbar.json @@ -0,0 +1,173 @@ +{ + "components": { + "main": { + "mediaPlayback": { + "supportedPlaybackCommands": { + "value": ["play", "pause", "stop"], + "timestamp": "2025-03-23T01:10:02.207Z" + }, + "playbackStatus": { + "value": "playing", + "timestamp": "2025-03-23T01:19:44.622Z" + } + }, + "samsungvd.groupInfo": { + "role": { + "value": "none", + "timestamp": "2025-03-23T01:17:10.965Z" + }, + "channel": { + "value": "all", + "timestamp": "2025-03-23T01:17:10.965Z" + }, + "masterName": { + "value": "", + "timestamp": "2025-03-23T01:17:10.965Z" + }, + "status": { + "value": "single", + "timestamp": "2025-03-23T01:17:10.965Z" + } + }, + "audioVolume": { + "volume": { + "value": 1, + "unit": "%", + "timestamp": "2025-03-23T01:17:13.754Z" + } + }, + "ocf": { + "st": { + "value": "NONE", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "mndt": { + "value": "2018-01-01", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "mnfv": { + "value": "HW-Q80RWWB-1012.6", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "mnhw": { + "value": "0-0", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "di": { + "value": "afcf3b91-48fe-4c3b-ab44-ddff2a0a6577", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "mnsl": { + "value": "http://www.samsung.com/sec/audio-video/", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "dmv": { + "value": "res.1.1.0,sh.1.1.0", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "n": { + "value": "[AV] Samsung Soundbar Q80R", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "mnmo": { + "value": "Q80R", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "vid": { + "value": "VD-NetworkAudio-001S", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "mnmn": { + "value": "Samsung Electronics", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "mnml": { + "value": "http://www.samsung.com", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "mnpv": { + "value": "Tizen 4.0", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "mnos": { + "value": "4.1.10", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "pi": { + "value": "afcf3b91-48fe-4c3b-ab44-ddff2a0a6577", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "icv": { + "value": "core.1.1.0", + "timestamp": "2024-12-18T21:07:25.406Z" + } + }, + "mediaInputSource": { + "supportedInputSources": { + "value": ["wifi", "bluetooth", "HDMI1", "HDMI2", "digital"], + "timestamp": "2025-03-23T01:18:01.663Z" + }, + "inputSource": { + "value": "wifi", + 
"timestamp": "2025-03-23T01:18:01.663Z" + } + }, + "refresh": {}, + "audioNotification": {}, + "audioMute": { + "mute": { + "value": "unmuted", + "timestamp": "2025-03-23T01:17:11.024Z" + } + }, + "execute": { + "data": { + "value": { + "payload": { + "rt": ["x.com.samsung.networkaudio.soundmode"], + "if": ["oic.if.a", "oic.if.baseline"], + "x.com.samsung.networkaudio.soundmode": "standard" + } + }, + "data": { + "href": "/sec/networkaudio/soundmode" + }, + "timestamp": "2023-07-16T23:16:55.582Z" + } + }, + "samsungvd.audioInputSource": { + "supportedInputSources": { + "value": ["wifi", "bluetooth", "HDMI1", "HDMI2", "digital"], + "timestamp": "2025-03-23T01:18:01.663Z" + }, + "inputSource": { + "value": "wificp", + "timestamp": "2025-03-23T01:18:01.663Z" + } + }, + "switch": { + "switch": { + "value": "on", + "timestamp": "2025-03-23T01:19:44.837Z" + } + }, + "audioTrackData": { + "totalTime": { + "value": null, + "timestamp": "2020-07-30T16:09:09.109Z" + }, + "audioTrackData": { + "value": { + "title": "Never Gonna Give You Up", + "artist": "Rick Astley" + }, + "timestamp": "2025-03-23T01:19:15.067Z" + }, + "elapsedTime": { + "value": null, + "timestamp": "2020-07-30T16:09:09.109Z" + } + } + } + } +} diff --git a/tests/components/smartthings/fixtures/device_status/ikea_kadrilj.json b/tests/components/smartthings/fixtures/device_status/ikea_kadrilj.json new file mode 100644 index 00000000000..56a2d9e762d --- /dev/null +++ b/tests/components/smartthings/fixtures/device_status/ikea_kadrilj.json @@ -0,0 +1,68 @@ +{ + "components": { + "main": { + "windowShadeLevel": { + "shadeLevel": { + "value": 32, + "unit": "%", + "timestamp": "2025-03-13T10:40:25.613Z" + } + }, + "refresh": {}, + "windowShadePreset": {}, + "battery": { + "quantity": { + "value": null + }, + "battery": { + "value": 37, + "unit": "%", + "timestamp": "2025-03-13T07:09:05.149Z" + }, + "type": { + "value": null + } + }, + "firmwareUpdate": { + "lastUpdateStatusReason": { + "value": null + }, + "availableVersion": { + "value": "22007631", + "timestamp": "2025-03-12T20:35:04.576Z" + }, + "lastUpdateStatus": { + "value": null + }, + "supportedCommands": { + "value": null + }, + "state": { + "value": "updateRequested", + "timestamp": "2025-03-12T20:35:03.879Z" + }, + "updateAvailable": { + "value": false, + "timestamp": "2025-03-12T20:35:04.577Z" + }, + "currentVersion": { + "value": "22007631", + "timestamp": "2025-03-12T20:35:04.508Z" + }, + "lastUpdateTime": { + "value": null + } + }, + "windowShade": { + "supportedWindowShadeCommands": { + "value": ["open", "close", "pause"], + "timestamp": "2025-03-13T10:33:48.402Z" + }, + "windowShade": { + "value": "partially open", + "timestamp": "2025-03-13T10:55:58.205Z" + } + } + } + } +} diff --git a/tests/components/smartthings/fixtures/device_status/tplink_p110.json b/tests/components/smartthings/fixtures/device_status/tplink_p110.json new file mode 100644 index 00000000000..9e1d41ed66e --- /dev/null +++ b/tests/components/smartthings/fixtures/device_status/tplink_p110.json @@ -0,0 +1,46 @@ +{ + "components": { + "main": { + "powerConsumptionReport": { + "powerConsumption": { + "value": { + "start": "2025-03-10T14:43:42.500Z", + "end": "2025-03-10T14:59:42.500Z", + "energy": 15720, + "deltaEnergy": 0 + }, + "timestamp": "2025-03-10T14:59:50.010Z" + } + }, + "healthCheck": { + "checkInterval": { + "value": 60, + "unit": "s", + "data": { + "deviceScheme": "UNTRACKED", + "protocol": "cloud" + }, + "timestamp": "2024-03-07T21:14:59.839Z" + }, + "healthStatus": { + "value": null + }, + 
"DeviceWatch-Enroll": { + "value": null + }, + "DeviceWatch-DeviceStatus": { + "value": "online", + "data": {}, + "timestamp": "2025-03-10T14:14:37.232Z" + } + }, + "refresh": {}, + "switch": { + "switch": { + "value": "on", + "timestamp": "2025-03-10T14:14:37.232Z" + } + } + } + } +} diff --git a/tests/components/smartthings/fixtures/device_status/vd_sensor_light_2023.json b/tests/components/smartthings/fixtures/device_status/vd_sensor_light_2023.json new file mode 100644 index 00000000000..cffefa20c4a --- /dev/null +++ b/tests/components/smartthings/fixtures/device_status/vd_sensor_light_2023.json @@ -0,0 +1,95 @@ +{ + "components": { + "main": { + "ocf": { + "st": { + "value": "2025-01-14T08:07:36Z", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "mndt": { + "value": "2023-01-01", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "mnfv": { + "value": "latest", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "mnhw": { + "value": "", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "di": { + "value": "5cc1c096-98b9-460c-8f1c-1045509ec605", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "mnsl": { + "value": "", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "dmv": { + "value": "res.1.1.0,sh.1.1.0", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "n": { + "value": "Light Sensor - 55 The Frame", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "mnmo": { + "value": "QE55LS03DAUXXN", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "vid": { + "value": "VD-Sensor.Light-2023", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "mnmn": { + "value": "Samsung Electronics", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "mnml": { + "value": "", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "mnpv": { + "value": "8.0", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "mnos": { + "value": "Tizen", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "pi": { + "value": "5cc1c096-98b9-460c-8f1c-1045509ec605", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "icv": { + "value": "core.1.1.0", + "timestamp": "2025-01-14T08:07:40.220Z" + } + }, + "samsungvd.deviceCategory": { + "category": { + "value": null + } + }, + "relativeBrightness": { + "brightnessIntensity": { + "value": 2, + "unit": "level", + "timestamp": "2025-02-11T19:08:25.539Z" + } + }, + "refresh": {}, + "execute": { + "data": { + "value": null + } + }, + "switch": { + "switch": { + "value": null + } + } + } + } +} diff --git a/tests/components/smartthings/fixtures/devices/abl_light_b_001.json b/tests/components/smartthings/fixtures/devices/abl_light_b_001.json new file mode 100644 index 00000000000..bb4970b6d5a --- /dev/null +++ b/tests/components/smartthings/fixtures/devices/abl_light_b_001.json @@ -0,0 +1,59 @@ +{ + "items": [ + { + "deviceId": "7c16163e-c94e-482f-95f6-139ae0cd9d5e", + "name": "ABL Wafer Down Light(BLE)", + "label": "Kitchen Light 5", + "manufacturerName": "Samsung Electronics", + "presentationId": "ABL-LIGHT-B-001", + "deviceManufacturerCode": "Samsung Electronics", + "locationId": "6c314222-8baf-48a0-9442-5b1102a8757f", + "ownerId": "f24ff388-700c-7d1e-91f2-1c37ae68ce2b", + "components": [ + { + "id": "main", + "label": "main", + "capabilities": [ + { + "id": "switch", + "version": 1 + }, + { + "id": "colorTemperature", + "version": 1 + }, + { + "id": "switchLevel", + "version": 1 + } + ], + "categories": [ + { + "name": "Light", + "categoryType": "manufacturer" + } + ] + } + ], + "createTime": "2025-03-08T22:40:25.073Z", + "profile": { + "id": "65f5db53-9a78-4b19-8e40-d32187cd59ab" + }, + "bleD2D": { + 
"encryptionKey": "f593369dcea915f6352a4a42cd4b2ea6", + "cipher": "AES_128-CBC-PKCS7Padding", + "advertisingId": "b13d7192", + "identifier": "88-57-1d-7c-cb-cf", + "configurationUrl": "https://apis.samsungiotcloud.com/v1/miniature/profile/65f5db53-9a78-4b19-8e40-d32187cd59ab", + "bleDeviceType": "BLE", + "metadata": null + }, + "type": "BLE_D2D", + "restrictionTier": 0, + "allowed": null, + "executionContext": "CLOUD", + "relationships": [] + } + ], + "_links": {} +} diff --git a/tests/components/smartthings/fixtures/devices/aux_ac.json b/tests/components/smartthings/fixtures/devices/aux_ac.json new file mode 100644 index 00000000000..fcdb581748c --- /dev/null +++ b/tests/components/smartthings/fixtures/devices/aux_ac.json @@ -0,0 +1,81 @@ +{ + "items": [ + { + "deviceId": "bf53a150-f8a4-45d1-aac4-86252475d551", + "name": "vedgeaircon.v1", + "label": "AUX A/C on-off", + "manufacturerName": "SmartThingsCommunity", + "presentationId": "ab252042-5669-3c2c-8b1b-d606bbcc9e04", + "deviceManufacturerCode": "SmartThings Community", + "locationId": "5db1e3d8-ea26-44b4-8ed0-1ba9c841fd57", + "ownerId": "5404aa57-6a68-4fe2-83ff-168ef769d1c7", + "roomId": "564cdd9a-fa9f-4187-902f-95656ef22989", + "components": [ + { + "id": "main", + "label": "main", + "capabilities": [ + { + "id": "switch", + "version": 1 + }, + { + "id": "airConditionerMode", + "version": 1 + }, + { + "id": "thermostatCoolingSetpoint", + "version": 1 + }, + { + "id": "airConditionerFanMode", + "version": 1 + }, + { + "id": "fanSpeed", + "version": 1 + }, + { + "id": "temperatureMeasurement", + "version": 1 + }, + { + "id": "partyvoice23922.vtempset", + "version": 1 + } + ], + "categories": [ + { + "name": "AirConditioner", + "categoryType": "manufacturer" + } + ] + } + ], + "createTime": "2024-06-19T20:18:45.407Z", + "parentDeviceId": "e699599d-30f8-4cf0-8de7-6dbdba6a665f", + "profile": { + "id": "87f0ac35-e024-3c0a-8153-78ca27a6fe0c" + }, + "lan": { + "networkId": "vEdge_A/C_1718828324.999", + "driverId": "0fd9a9a4-8863-4a83-97a7-5a288ff0f5a6", + "executingLocally": true, + "hubId": "e699599d-30f8-4cf0-8de7-6dbdba6a665f", + "provisioningState": "TYPED" + }, + "type": "LAN", + "restrictionTier": 0, + "allowed": null, + "indoorMap": { + "coordinates": [130.0, 36.0, 378.0], + "rotation": [270.0, 0.0, 0.0], + "visible": true, + "data": null + }, + "executionContext": "LOCAL", + "relationships": [] + } + ], + "_links": {} +} diff --git a/tests/components/smartthings/fixtures/devices/contact_sensor.json b/tests/components/smartthings/fixtures/devices/contact_sensor.json index 68070abbfc3..9823a70cb61 100644 --- a/tests/components/smartthings/fixtures/devices/contact_sensor.json +++ b/tests/components/smartthings/fixtures/devices/contact_sensor.json @@ -42,7 +42,7 @@ "categoryType": "manufacturer" }, { - "name": "ContactSensor", + "name": "GarageDoor", "categoryType": "user" } ] diff --git a/tests/components/smartthings/fixtures/devices/da_ac_airsensor_01001.json b/tests/components/smartthings/fixtures/devices/da_ac_airsensor_01001.json new file mode 100644 index 00000000000..c8304e9c6d8 --- /dev/null +++ b/tests/components/smartthings/fixtures/devices/da_ac_airsensor_01001.json @@ -0,0 +1,145 @@ +{ + "items": [ + { + "deviceId": "a3a970ea-e09c-9c04-161b-94c934e21666", + "name": "Samsung AirMonitor", + "label": "\uc5d0\uc5b4\ubaa8\ub2c8\ud130 \ud50c\ub7ec\uc2a4", + "manufacturerName": "Samsung Electronics", + "presentationId": "DA-AC-AIRSENSOR-01001", + "deviceManufacturerCode": "Samsung Electronics", + "locationId": 
"33db9e71-abe9-43a0-acd3-3f0927bbe5b7", + "ownerId": "9a1ee192-04ba-46ca-9c3d-196d8dbcf807", + "roomId": "445c006d-1796-4dd6-8308-1c3cd045e8ff", + "deviceTypeName": "x.com.st.d.airqualitysensor", + "components": [ + { + "id": "main", + "label": "main", + "capabilities": [ + { + "id": "ocf", + "version": 1 + }, + { + "id": "execute", + "version": 1 + }, + { + "id": "refresh", + "version": 1 + }, + { + "id": "temperatureMeasurement", + "version": 1 + }, + { + "id": "relativeHumidityMeasurement", + "version": 1 + }, + { + "id": "dustSensor", + "version": 1 + }, + { + "id": "dustHealthConcern", + "version": 1 + }, + { + "id": "fineDustHealthConcern", + "version": 1 + }, + { + "id": "veryFineDustSensor", + "version": 1 + }, + { + "id": "veryFineDustHealthConcern", + "version": 1 + }, + { + "id": "airQualitySensor", + "version": 1 + }, + { + "id": "odorSensor", + "version": 1 + }, + { + "id": "carbonDioxideMeasurement", + "version": 1 + }, + { + "id": "carbonDioxideHealthConcern", + "version": 1 + }, + { + "id": "samsungce.deviceIdentification", + "version": 1 + }, + { + "id": "samsungce.rechargeableBattery", + "version": 1 + }, + { + "id": "samsungce.doNotDisturb", + "version": 1 + }, + { + "id": "samsungce.driverVersion", + "version": 1 + }, + { + "id": "samsungce.softwareUpdate", + "version": 1 + }, + { + "id": "custom.deviceReportStateConfiguration", + "version": 1 + }, + { + "id": "custom.disabledCapabilities", + "version": 1 + }, + { + "id": "sec.diagnosticsInformation", + "version": 1 + } + ], + "categories": [ + { + "name": "AirQualityDetector", + "categoryType": "manufacturer" + } + ] + } + ], + "createTime": "2023-12-09T04:05:59.040Z", + "profile": { + "id": "1d34dd9d-6840-3df6-a6d0-5d9f4a4af2e1" + }, + "ocf": { + "ocfDeviceType": "x.com.st.d.airqualitysensor", + "name": "Samsung AirMonitor", + "specVersion": "core.1.1.0", + "verticalDomainSpecVersion": "1.2.1", + "manufacturerName": "Samsung Electronics", + "modelNumber": "ASM-KR-TP1-22-ACMB1M|10243041|75000000001611C40800020000080000", + "platformVersion": "DAWIT 2.0", + "platformOS": "TizenRT 4.0", + "hwVersion": "Realtek", + "firmwareVersion": "ASM-KR-TP1-22-ACMB1M_16240426", + "vendorId": "DA-AC-AIRSENSOR-01001", + "vendorResourceClientServerVersion": "MediaTek Release 240426", + "lastSignupTime": "2023-12-09T04:05:54.816486Z", + "transferCandidate": false, + "additionalAuthCodeRequired": false + }, + "type": "OCF", + "restrictionTier": 0, + "allowed": null, + "executionContext": "CLOUD", + "relationships": [] + } + ], + "_links": {} +} diff --git a/tests/components/smartthings/fixtures/devices/da_ks_cooktop_31001.json b/tests/components/smartthings/fixtures/devices/da_ks_cooktop_31001.json new file mode 100644 index 00000000000..433e45dae7a --- /dev/null +++ b/tests/components/smartthings/fixtures/devices/da_ks_cooktop_31001.json @@ -0,0 +1,277 @@ +{ + "items": [ + { + "deviceId": "808dbd84-f357-47e2-a0cd-3b66fa22d584", + "name": "Builtin Cooktop", + "label": "Induction Hob", + "manufacturerName": "0A4H", + "presentationId": "DA-KS-COOKTOP-31001", + "deviceManufacturerCode": "0A4H", + "locationId": "7d27161a-0ef6-4294-91a0-80054ea5bc59", + "ownerId": "d52fb883-0f76-f4d9-0f6a-7ec2c0987b11", + "roomId": "afe14ff1-d444-420d-a766-4dd52f3e1c71", + "deviceTypeId": "Cooktop", + "deviceTypeName": "Samsung Cooktop", + "components": [ + { + "id": "main", + "label": "main", + "capabilities": [ + { + "id": "healthCheck", + "version": 1 + }, + { + "id": "switch", + "version": 1 + }, + { + "id": "audioMute", + "version": 1 + }, + { + "id": 
"custom.disabledComponents", + "version": 1 + }, + { + "id": "custom.userNotification", + "version": 1 + }, + { + "id": "custom.cooktopOperatingState", + "version": 1 + }, + { + "id": "samsungce.deviceIdentification", + "version": 1 + }, + { + "id": "samsungce.kitchenDeviceIdentification", + "version": 1 + }, + { + "id": "samsungce.softwareVersion", + "version": 1 + }, + { + "id": "samsungce.softwareUpdate", + "version": 1 + }, + { + "id": "samsungce.errorAndAlarmState", + "version": 1 + }, + { + "id": "samsungce.remoteManagementData", + "version": 1 + }, + { + "id": "samsungce.kidsLockControl", + "version": 1 + }, + { + "id": "samsungce.cooktopFlexZone", + "version": 1 + } + ], + "categories": [ + { + "name": "Cooktop", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "burner-01", + "label": "burner-01", + "capabilities": [ + { + "id": "samsungce.surfaceResidualHeat", + "version": 1 + }, + { + "id": "samsungce.cooktopHeatingPower", + "version": 1 + }, + { + "id": "samsungce.countDownTimer", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "burner-02", + "label": "burner-02", + "capabilities": [ + { + "id": "samsungce.surfaceResidualHeat", + "version": 1 + }, + { + "id": "samsungce.cooktopHeatingPower", + "version": 1 + }, + { + "id": "samsungce.countDownTimer", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "burner-03", + "label": "burner-03", + "capabilities": [ + { + "id": "samsungce.surfaceResidualHeat", + "version": 1 + }, + { + "id": "samsungce.cooktopHeatingPower", + "version": 1 + }, + { + "id": "samsungce.countDownTimer", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "burner-04", + "label": "burner-04", + "capabilities": [ + { + "id": "samsungce.surfaceResidualHeat", + "version": 1 + }, + { + "id": "samsungce.cooktopHeatingPower", + "version": 1 + }, + { + "id": "samsungce.countDownTimer", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "burner-05", + "label": "burner-05", + "capabilities": [ + { + "id": "samsungce.surfaceResidualHeat", + "version": 1 + }, + { + "id": "samsungce.cooktopHeatingPower", + "version": 1 + }, + { + "id": "samsungce.countDownTimer", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "burner-06", + "label": "burner-06", + "capabilities": [ + { + "id": "samsungce.surfaceResidualHeat", + "version": 1 + }, + { + "id": "samsungce.cooktopHeatingPower", + "version": 1 + }, + { + "id": "samsungce.countDownTimer", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "hood", + "label": "hood", + "capabilities": [ + { + "id": "switch", + "version": 1 + }, + { + "id": "samsungce.connectionState", + "version": 1 + }, + { + "id": "samsungce.hoodFanSpeed", + "version": 1 + }, + { + "id": "samsungce.lamp", + "version": 1 + }, + { + "id": "samsungce.countDownTimer", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + } + ], + "createTime": "2025-03-25T18:18:23.576Z", + "profile": { + "id": "a99bbcb8-51c9-468d-b9d5-0ce6dca09d5a" + }, + "mqtt": { + "executingLocally": false, + "transferCandidate": false + }, + "type": "MQTT", + "restrictionTier": 0, + "allowed": null, + 
"executionContext": "CLOUD", + "relationships": [] + } + ], + "_links": {} +} diff --git a/tests/components/smartthings/fixtures/devices/da_ks_oven_01061.json b/tests/components/smartthings/fixtures/devices/da_ks_oven_01061.json new file mode 100644 index 00000000000..e82e28d2275 --- /dev/null +++ b/tests/components/smartthings/fixtures/devices/da_ks_oven_01061.json @@ -0,0 +1,153 @@ +{ + "items": [ + { + "deviceId": "9447959a-0dfa-6b27-d40d-650da525c53f", + "name": "[oven] Samsung", + "label": "Oven", + "manufacturerName": "Samsung Electronics", + "presentationId": "DA-KS-OVEN-01061", + "deviceManufacturerCode": "Samsung Electronics", + "locationId": "a81dc8da-5a3f-43b6-8c8a-1309f37eeeb9", + "ownerId": "97ee2149-9de0-3287-8245-24d6fd1609aa", + "roomId": "eb2167dd-8b8d-4131-b59e-5dd391b2e151", + "deviceTypeName": "Samsung OCF Oven", + "components": [ + { + "id": "main", + "label": "main", + "capabilities": [ + { + "id": "ocf", + "version": 1 + }, + { + "id": "execute", + "version": 1 + }, + { + "id": "refresh", + "version": 1 + }, + { + "id": "remoteControlStatus", + "version": 1 + }, + { + "id": "ovenSetpoint", + "version": 1 + }, + { + "id": "ovenMode", + "version": 1 + }, + { + "id": "ovenOperatingState", + "version": 1 + }, + { + "id": "temperatureMeasurement", + "version": 1 + }, + { + "id": "samsungce.deviceIdentification", + "version": 1 + }, + { + "id": "samsungce.doorState", + "version": 1 + }, + { + "id": "samsungce.definedRecipe", + "version": 1 + }, + { + "id": "samsungce.kitchenDeviceIdentification", + "version": 1 + }, + { + "id": "samsungce.kitchenDeviceDefaults", + "version": 1 + }, + { + "id": "samsungce.driverVersion", + "version": 1 + }, + { + "id": "samsungce.ovenMode", + "version": 1 + }, + { + "id": "samsungce.ovenOperatingState", + "version": 1 + }, + { + "id": "samsungce.microwavePower", + "version": 1 + }, + { + "id": "samsungce.lamp", + "version": 1 + }, + { + "id": "samsungce.kitchenModeSpecification", + "version": 1 + }, + { + "id": "samsungce.kidsLock", + "version": 1 + }, + { + "id": "samsungce.softwareUpdate", + "version": 1 + }, + { + "id": "samsungce.waterReservoir", + "version": 1 + }, + { + "id": "samsungce.ovenDrainageRequirement", + "version": 1 + }, + { + "id": "custom.disabledCapabilities", + "version": 1 + } + ], + "categories": [ + { + "name": "Oven", + "categoryType": "manufacturer" + } + ] + } + ], + "createTime": "2025-01-08T17:29:12.549Z", + "profile": { + "id": "eb34598f-f96a-3420-a90a-71693052eaa3" + }, + "ocf": { + "ocfDeviceType": "oic.d.oven", + "name": "[oven] Samsung", + "specVersion": "core.1.1.0", + "verticalDomainSpecVersion": "res.1.1.0,sh.1.1.0", + "manufacturerName": "Samsung Electronics", + "modelNumber": "TP1X_DA-KS-OVEN-01061|40457041|50030018001611000A00000000000000", + "platformVersion": "DAWIT 3.0", + "platformOS": "TizenRT 3.1", + "hwVersion": "Realtek", + "firmwareVersion": "AKS-WW-TP1X-21-OVEN_40211229", + "vendorId": "DA-KS-OVEN-01061", + "vendorResourceClientServerVersion": "Realtek Release 3.1.211122", + "lastSignupTime": "2025-01-08T17:29:08.536664213Z", + "transferCandidate": false, + "additionalAuthCodeRequired": false + }, + "type": "OCF", + "restrictionTier": 0, + "allowed": null, + "executionContext": "CLOUD", + "relationships": [] + } + ], + "_links": {} +} diff --git a/tests/components/smartthings/fixtures/devices/da_ks_range_0101x.json b/tests/components/smartthings/fixtures/devices/da_ks_range_0101x.json new file mode 100644 index 00000000000..e918e2d77ca --- /dev/null +++ 
b/tests/components/smartthings/fixtures/devices/da_ks_range_0101x.json @@ -0,0 +1,197 @@ +{ + "items": [ + { + "deviceId": "2c3cbaa0-1899-5ddc-7b58-9d657bd48f18", + "name": "Samsung Range", + "label": "Vulcan", + "manufacturerName": "Samsung Electronics", + "presentationId": "DA-KS-RANGE-0101X", + "deviceManufacturerCode": "Samsung Electronics", + "locationId": "597a4912-13c9-47ab-9956-7ebc38b61abd", + "ownerId": "c4478c70-9014-e5c9-993c-f62707fa1e61", + "roomId": "fc407cd9-3b32-4fc0-bf23-e0d4995101e9", + "deviceTypeName": "Samsung OCF Range", + "components": [ + { + "id": "main", + "label": "main", + "capabilities": [ + { + "id": "ocf", + "version": 1 + }, + { + "id": "execute", + "version": 1 + }, + { + "id": "refresh", + "version": 1 + }, + { + "id": "remoteControlStatus", + "version": 1 + }, + { + "id": "ovenSetpoint", + "version": 1 + }, + { + "id": "ovenMode", + "version": 1 + }, + { + "id": "ovenOperatingState", + "version": 1 + }, + { + "id": "temperatureMeasurement", + "version": 1 + }, + { + "id": "samsungce.deviceIdentification", + "version": 1 + }, + { + "id": "samsungce.driverVersion", + "version": 1 + }, + { + "id": "samsungce.kitchenDeviceIdentification", + "version": 1 + }, + { + "id": "samsungce.kitchenDeviceDefaults", + "version": 1 + }, + { + "id": "samsungce.doorState", + "version": 1 + }, + { + "id": "samsungce.customRecipe", + "version": 1 + }, + { + "id": "samsungce.ovenMode", + "version": 1 + }, + { + "id": "samsungce.ovenOperatingState", + "version": 1 + }, + { + "id": "samsungce.meatProbe", + "version": 1 + }, + { + "id": "samsungce.lamp", + "version": 1 + }, + { + "id": "samsungce.kitchenModeSpecification", + "version": 1 + }, + { + "id": "samsungce.kidsLock", + "version": 1 + }, + { + "id": "samsungce.softwareUpdate", + "version": 1 + }, + { + "id": "custom.cooktopOperatingState", + "version": 1 + }, + { + "id": "custom.disabledCapabilities", + "version": 1 + } + ], + "categories": [ + { + "name": "Range", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "cavity-01", + "label": "cavity-01", + "capabilities": [ + { + "id": "ovenSetpoint", + "version": 1 + }, + { + "id": "ovenMode", + "version": 1 + }, + { + "id": "ovenOperatingState", + "version": 1 + }, + { + "id": "temperatureMeasurement", + "version": 1 + }, + { + "id": "samsungce.ovenMode", + "version": 1 + }, + { + "id": "samsungce.ovenOperatingState", + "version": 1 + }, + { + "id": "samsungce.kitchenDeviceDefaults", + "version": 1 + }, + { + "id": "custom.ovenCavityStatus", + "version": 1 + }, + { + "id": "custom.disabledCapabilities", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + } + ], + "createTime": "2022-02-21T22:37:01.648Z", + "profile": { + "id": "8e479dd0-9719-337a-9fbe-2c4572f95c71" + }, + "ocf": { + "ocfDeviceType": "oic.d.range", + "name": "Samsung Range", + "specVersion": "core.1.1.0", + "verticalDomainSpecVersion": "1.2.1", + "manufacturerName": "Samsung Electronics", + "modelNumber": "TP1X_DA-KS-RANGE-0101X|40445041|5001011E031511010200000000000000", + "platformVersion": "DAWIT 3.0", + "platformOS": "TizenRT 3.1", + "hwVersion": "Realtek", + "firmwareVersion": "AKS-WW-TP1-20-OVEN-3-CR_40240205", + "vendorId": "DA-KS-RANGE-0101X", + "vendorResourceClientServerVersion": "Realtek Release 3.1.220727", + "lastSignupTime": "2023-11-28T22:49:01.876575Z", + "transferCandidate": false, + "additionalAuthCodeRequired": false + }, + "type": "OCF", + "restrictionTier": 0, + "allowed": null, + "executionContext": "CLOUD", + 
"relationships": [] + } + ], + "_links": {} +} diff --git a/tests/components/smartthings/fixtures/devices/da_sac_ehs_000001_sub.json b/tests/components/smartthings/fixtures/devices/da_sac_ehs_000001_sub.json new file mode 100644 index 00000000000..dffe57b3280 --- /dev/null +++ b/tests/components/smartthings/fixtures/devices/da_sac_ehs_000001_sub.json @@ -0,0 +1,202 @@ +{ + "items": [ + { + "deviceId": "1f98ebd0-ac48-d802-7f62-000001200100", + "name": "Eco Heating System", + "label": "Eco Heating System", + "manufacturerName": "Samsung Electronics", + "presentationId": "DA-SAC-EHS-000001-SUB", + "deviceManufacturerCode": "Samsung Electronics", + "locationId": "d22d6401-6070-4928-8e7b-b724e2dbf425", + "ownerId": "35445a41-3ae2-4bc0-6f51-31705de6b96f", + "roomId": "169ef666-a51d-4d74-9b45-e660ecd4a8d7", + "deviceTypeName": "Samsung OCF Air Conditioner", + "components": [ + { + "id": "main", + "label": "main", + "capabilities": [ + { + "id": "ocf", + "version": 1 + }, + { + "id": "switch", + "version": 1 + }, + { + "id": "temperatureMeasurement", + "version": 1 + }, + { + "id": "thermostatCoolingSetpoint", + "version": 1 + }, + { + "id": "airConditionerMode", + "version": 1 + }, + { + "id": "powerConsumptionReport", + "version": 1 + }, + { + "id": "demandResponseLoadControl", + "version": 1 + }, + { + "id": "remoteControlStatus", + "version": 1 + }, + { + "id": "refresh", + "version": 1 + }, + { + "id": "execute", + "version": 1 + }, + { + "id": "custom.energyType", + "version": 1 + }, + { + "id": "custom.outingMode", + "version": 1 + }, + { + "id": "custom.thermostatSetpointControl", + "version": 1 + }, + { + "id": "custom.deviceReportStateConfiguration", + "version": 1 + }, + { + "id": "custom.disabledCapabilities", + "version": 1 + }, + { + "id": "samsungce.deviceIdentification", + "version": 1 + }, + { + "id": "samsungce.driverVersion", + "version": 1 + }, + { + "id": "samsungce.sacDisplayCondition", + "version": 1 + }, + { + "id": "samsungce.softwareUpdate", + "version": 1 + }, + { + "id": "samsungce.ehsFsvSettings", + "version": 1 + }, + { + "id": "samsungce.ehsCycleData", + "version": 1 + }, + { + "id": "samsungce.ehsTemperatureReference", + "version": 1 + }, + { + "id": "samsungce.ehsThermostat", + "version": 1 + }, + { + "id": "samsungce.toggleSwitch", + "version": 1 + } + ], + "categories": [ + { + "name": "AirConditioner", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "INDOOR", + "label": "INDOOR", + "capabilities": [ + { + "id": "switch", + "version": 1 + }, + { + "id": "temperatureMeasurement", + "version": 1 + }, + { + "id": "thermostatCoolingSetpoint", + "version": 1 + }, + { + "id": "airConditionerMode", + "version": 1 + }, + { + "id": "custom.thermostatSetpointControl", + "version": 1 + }, + { + "id": "samsungce.ehsTemperatureReference", + "version": 1 + }, + { + "id": "samsungce.sacDisplayCondition", + "version": 1 + }, + { + "id": "samsungce.ehsThermostat", + "version": 1 + }, + { + "id": "samsungce.toggleSwitch", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + } + ], + "createTime": "2023-08-02T14:32:26.006Z", + "parentDeviceId": "1f98ebd0-ac48-d802-7f62-12592d8286b7", + "profile": { + "id": "54b9789f-2c8c-310d-9e14-9a84903c792b" + }, + "ocf": { + "ocfDeviceType": "oic.d.airconditioner", + "name": "Eco Heating System", + "specVersion": "core.1.1.0", + "verticalDomainSpecVersion": "1.2.1", + "manufacturerName": "Samsung Electronics", + "modelNumber": 
"SAC_EHS_MONO|220614|61007400001600000400000000000000", + "platformVersion": "4.0", + "platformOS": "Tizen", + "hwVersion": "", + "firmwareVersion": "20240611.1", + "vendorId": "DA-SAC-EHS-000001-SUB", + "vendorResourceClientServerVersion": "3.2.20", + "lastSignupTime": "2023-08-02T14:32:25.282882Z", + "transferCandidate": false, + "additionalAuthCodeRequired": false + }, + "type": "OCF", + "restrictionTier": 0, + "allowed": null, + "executionContext": "CLOUD", + "relationships": [] + } + ], + "_links": {} +} diff --git a/tests/components/smartthings/fixtures/devices/da_wm_sc_000001.json b/tests/components/smartthings/fixtures/devices/da_wm_sc_000001.json new file mode 100644 index 00000000000..8b501cba9b7 --- /dev/null +++ b/tests/components/smartthings/fixtures/devices/da_wm_sc_000001.json @@ -0,0 +1,172 @@ +{ + "items": [ + { + "deviceId": "b93211bf-9d96-bd21-3b2f-964fcc87f5cc", + "name": "[airdresser] Samsung", + "label": "AirDresser", + "manufacturerName": "Samsung Electronics", + "presentationId": "DA-WM-SC-000001", + "deviceManufacturerCode": "Samsung Electronics", + "locationId": "df59873c-4e2c-43ba-bcd4-ade4efb0504a", + "ownerId": "71254e90-c144-45b6-aabe-709f78f48376", + "roomId": "4c9052ba-4430-4cb1-a788-f1e4449c43c9", + "deviceTypeName": "Samsung OCF Steam Closet", + "components": [ + { + "id": "main", + "label": "main", + "capabilities": [ + { + "id": "ocf", + "version": 1 + }, + { + "id": "execute", + "version": 1 + }, + { + "id": "refresh", + "version": 1 + }, + { + "id": "switch", + "version": 1 + }, + { + "id": "remoteControlStatus", + "version": 1 + }, + { + "id": "dryerOperatingState", + "version": 1 + }, + { + "id": "demandResponseLoadControl", + "version": 1 + }, + { + "id": "powerConsumptionReport", + "version": 1 + }, + { + "id": "custom.steamClosetOperatingState", + "version": 1 + }, + { + "id": "custom.disabledCapabilities", + "version": 1 + }, + { + "id": "custom.energyType", + "version": 1 + }, + { + "id": "custom.steamClosetWrinklePrevent", + "version": 1 + }, + { + "id": "custom.jobBeginningStatus", + "version": 1 + }, + { + "id": "custom.supportedOptions", + "version": 1 + }, + { + "id": "custom.veryFineDustFilter", + "version": 1 + }, + { + "id": "samsungce.deviceIdentification", + "version": 1 + }, + { + "id": "samsungce.driverVersion", + "version": 1 + }, + { + "id": "samsungce.softwareUpdate", + "version": 1 + }, + { + "id": "samsungce.steamClosetDelayEnd", + "version": 1 + }, + { + "id": "samsungce.steamClosetKeepFreshMode", + "version": 1 + }, + { + "id": "samsungce.steamClosetSanitizeMode", + "version": 1 + }, + { + "id": "samsungce.steamClosetAutoCycleLink", + "version": 1 + }, + { + "id": "samsungce.steamClosetCycle", + "version": 1 + }, + { + "id": "samsungce.steamClosetCyclePreset", + "version": 1 + }, + { + "id": "samsungce.kidsLock", + "version": 1 + }, + { + "id": "samsungce.welcomeMessage", + "version": 1 + }, + { + "id": "samsungce.quickControl", + "version": 1 + }, + { + "id": "sec.diagnosticsInformation", + "version": 1 + }, + { + "id": "sec.wifiConfiguration", + "version": 1 + } + ], + "categories": [ + { + "name": "ClothingCareMachine", + "categoryType": "manufacturer" + } + ] + } + ], + "createTime": "2023-06-23T16:00:40.545Z", + "profile": { + "id": "a3623498-4747-3761-bac1-ba13f437d8ea" + }, + "ocf": { + "ocfDeviceType": "x.com.st.d.steamcloset", + "name": "[airdresser] Samsung", + "specVersion": "core.1.1.0", + "verticalDomainSpecVersion": "res.1.1.0,sh.1.1.0", + "manufacturerName": "Samsung Electronics", + "modelNumber": 
"DA_DF_TP2_20_COMMON|20299141|3801010200151107020100FF00000000", + "platformVersion": "DAWIT 2.0", + "platformOS": "TizenRT 2.0 + IPv6", + "hwVersion": "MediaTek", + "firmwareVersion": "DA_DF_TP2_20_COMMON_30230807", + "vendorId": "DA-WM-SC-000001", + "vendorResourceClientServerVersion": "MediaTek Release 2.211214.1", + "lastSignupTime": "2023-06-23T16:00:36.793123Z", + "transferCandidate": false, + "additionalAuthCodeRequired": false + }, + "type": "OCF", + "restrictionTier": 0, + "allowed": [], + "executionContext": "CLOUD" + } + ], + "_links": {} +} diff --git a/tests/components/smartthings/fixtures/devices/da_wm_wd_000001_1.json b/tests/components/smartthings/fixtures/devices/da_wm_wd_000001_1.json new file mode 100644 index 00000000000..995646438c4 --- /dev/null +++ b/tests/components/smartthings/fixtures/devices/da_wm_wd_000001_1.json @@ -0,0 +1,205 @@ +{ + "items": [ + { + "deviceId": "3a6c4e05-811d-5041-e956-3d04c424cbcd", + "name": "[dryer] Samsung", + "label": "Seca-Roupa", + "manufacturerName": "Samsung Electronics", + "presentationId": "DA-WM-WD-000001", + "deviceManufacturerCode": "Samsung Electronics", + "locationId": "06efa178-ad2f-4d22-838c-d63e05e5a58a", + "ownerId": "1a5f5619-e9ec-4302-beb9-633bb1657897", + "roomId": "dde24053-9707-49a5-ba0e-f19681514f37", + "deviceTypeName": "Samsung OCF Dryer", + "components": [ + { + "id": "main", + "label": "Seca-Roupa", + "capabilities": [ + { + "id": "ocf", + "version": 1 + }, + { + "id": "execute", + "version": 1 + }, + { + "id": "refresh", + "version": 1 + }, + { + "id": "switch", + "version": 1 + }, + { + "id": "remoteControlStatus", + "version": 1 + }, + { + "id": "dryerOperatingState", + "version": 1 + }, + { + "id": "powerConsumptionReport", + "version": 1 + }, + { + "id": "demandResponseLoadControl", + "version": 1 + }, + { + "id": "custom.disabledCapabilities", + "version": 1 + }, + { + "id": "custom.dryerDryLevel", + "version": 1 + }, + { + "id": "custom.dryerWrinklePrevent", + "version": 1 + }, + { + "id": "custom.energyType", + "version": 1 + }, + { + "id": "custom.jobBeginningStatus", + "version": 1 + }, + { + "id": "custom.supportedOptions", + "version": 1 + }, + { + "id": "samsungce.softwareUpdate", + "version": 1 + }, + { + "id": "samsungce.detergentOrder", + "version": 1 + }, + { + "id": "samsungce.detergentState", + "version": 1 + }, + { + "id": "samsungce.deviceIdentification", + "version": 1 + }, + { + "id": "samsungce.dongleSoftwareInstallation", + "version": 1 + }, + { + "id": "samsungce.driverVersion", + "version": 1 + }, + { + "id": "samsungce.dryerAutoCycleLink", + "version": 1 + }, + { + "id": "samsungce.dryerCycle", + "version": 1 + }, + { + "id": "samsungce.dryerCyclePreset", + "version": 1 + }, + { + "id": "samsungce.dryerDelayEnd", + "version": 1 + }, + { + "id": "samsungce.dryerDryingTemperature", + "version": 1 + }, + { + "id": "samsungce.dryerDryingTime", + "version": 1 + }, + { + "id": "samsungce.dryerFreezePrevent", + "version": 1 + }, + { + "id": "samsungce.dryerOperatingState", + "version": 1 + }, + { + "id": "samsungce.kidsLock", + "version": 1 + }, + { + "id": "samsungce.welcomeMessage", + "version": 1 + }, + { + "id": "samsungce.quickControl", + "version": 1 + }, + { + "id": "sec.diagnosticsInformation", + "version": 1 + }, + { + "id": "sec.wifiConfiguration", + "version": 1 + } + ], + "categories": [ + { + "name": "Dryer", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "hca.main", + "label": "hca.main", + "capabilities": [ + { + "id": "hca.dryerMode", + "version": 1 + } + ], + 
"categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + } + ], + "createTime": "2020-06-20T10:00:42Z", + "profile": { + "id": "53a1d049-eeda-396c-8324-e33438ef57be" + }, + "ocf": { + "ocfDeviceType": "oic.d.dryer", + "name": "[dryer] Samsung", + "specVersion": "core.1.1.0", + "verticalDomainSpecVersion": "res.1.1.0,sh.1.1.0", + "manufacturerName": "Samsung Electronics", + "modelNumber": "DA_WM_A51_20_COMMON|20221341|30010102001211000103000000000000", + "platformVersion": "DAWIT 2.0", + "platformOS": "TizenRT 1.0 + IPv6", + "hwVersion": "ARTIK051", + "firmwareVersion": "DA_WM_A51_20_COMMON_30230708", + "vendorId": "DA-WM-WD-000001", + "vendorResourceClientServerVersion": "ARTIK051 Release 2.210224.1", + "lastSignupTime": "2020-11-19T04:43:50.736Z", + "transferCandidate": false, + "additionalAuthCodeRequired": false + }, + "type": "OCF", + "restrictionTier": 0, + "allowed": null, + "executionContext": "CLOUD", + "relationships": [] + } + ], + "_links": {} +} diff --git a/tests/components/smartthings/fixtures/devices/ecobee_thermostat_offline.json b/tests/components/smartthings/fixtures/devices/ecobee_thermostat_offline.json new file mode 100644 index 00000000000..5fe8d8d28be --- /dev/null +++ b/tests/components/smartthings/fixtures/devices/ecobee_thermostat_offline.json @@ -0,0 +1,82 @@ +{ + "items": [ + { + "deviceId": "1888b38f-6246-4f1e-911b-bfcfb66999db", + "name": "v4 - ecobee Thermostat - Heat and Cool (F)", + "label": "Downstairs", + "manufacturerName": "0A0b", + "presentationId": "ST_5334da38-8076-4b40-9f6c-ac3fccaa5d24", + "deviceManufacturerCode": "ecobee", + "locationId": "1030449a-22c1-4a80-9781-0bd4ab7f0f2f", + "ownerId": "e7dbb793-4351-4cdc-b037-e6e0b4f9df67", + "roomId": "d22e6f98-78fe-4a76-b904-6cad8628da59", + "components": [ + { + "id": "main", + "label": "main", + "capabilities": [ + { + "id": "temperatureMeasurement", + "version": 1 + }, + { + "id": "relativeHumidityMeasurement", + "version": 1 + }, + { + "id": "thermostatHeatingSetpoint", + "version": 1 + }, + { + "id": "thermostatCoolingSetpoint", + "version": 1 + }, + { + "id": "thermostatOperatingState", + "version": 1 + }, + { + "id": "thermostatMode", + "version": 1 + }, + { + "id": "thermostatFanMode", + "version": 1 + }, + { + "id": "refresh", + "version": 1 + }, + { + "id": "healthCheck", + "version": 1 + } + ], + "categories": [ + { + "name": "Thermostat", + "categoryType": "manufacturer" + } + ] + } + ], + "createTime": "2025-03-10T00:57:26.760Z", + "profile": { + "id": "234d537d-d388-497f-b0f4-2e25025119ba" + }, + "viper": { + "manufacturerName": "ecobee", + "modelName": "nikeSmart-thermostat", + "swVersion": "250308073247", + "hwVersion": "250308073247", + "endpointAppId": "viper_92ccdcc0-4184-11eb-b9c5-036180216747" + }, + "type": "VIPER", + "restrictionTier": 0, + "allowed": null, + "executionContext": "CLOUD", + "relationships": [] + } + ], + "_links": {} +} diff --git a/tests/components/smartthings/fixtures/devices/heatit_zpushwall.json b/tests/components/smartthings/fixtures/devices/heatit_zpushwall.json new file mode 100644 index 00000000000..0cd42e0e2ce --- /dev/null +++ b/tests/components/smartthings/fixtures/devices/heatit_zpushwall.json @@ -0,0 +1,155 @@ +{ + "items": [ + { + "deviceId": "5e5b97f3-3094-44e6-abc0-f61283412d6a", + "name": "heatit-zpushwall", + "label": "Livingroom smart switch", + "manufacturerName": "SmartThingsCommunity", + "presentationId": "52933933-7123-3315-a441-92d65df5f031", + "deviceManufacturerCode": "019B-0004-2403", + "locationId": 
"c85a9f8a-5d2e-4cdd-8bdb-bc49ba4a3544", + "ownerId": "7b68139b-d068-45d8-bf27-961320350024", + "roomId": "56e43461-2f7d-4c43-ba7c-29465f991289", + "components": [ + { + "id": "main", + "label": "main", + "capabilities": [ + { + "id": "battery", + "version": 1 + }, + { + "id": "refresh", + "version": 1 + } + ], + "categories": [ + { + "name": "RemoteController", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "button1", + "label": "button1", + "capabilities": [ + { + "id": "button", + "version": 1 + } + ], + "categories": [ + { + "name": "RemoteController", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "button2", + "label": "button2", + "capabilities": [ + { + "id": "button", + "version": 1 + } + ], + "categories": [ + { + "name": "RemoteController", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "button3", + "label": "button3", + "capabilities": [ + { + "id": "button", + "version": 1 + } + ], + "categories": [ + { + "name": "RemoteController", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "button4", + "label": "button4", + "capabilities": [ + { + "id": "button", + "version": 1 + } + ], + "categories": [ + { + "name": "RemoteController", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "button5", + "label": "button5", + "capabilities": [ + { + "id": "button", + "version": 1 + } + ], + "categories": [ + { + "name": "RemoteController", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "button6", + "label": "button6", + "capabilities": [ + { + "id": "button", + "version": 1 + } + ], + "categories": [ + { + "name": "RemoteController", + "categoryType": "manufacturer" + } + ] + } + ], + "createTime": "2023-12-04T16:51:15.774Z", + "parentDeviceId": "4869d882-e898-40c3-a198-7611b72187a5", + "profile": { + "id": "2d6e59af-63df-3102-8515-66f3d75c9323" + }, + "zwave": { + "networkId": "12", + "driverId": "1d39c140-ce10-490d-bf52-4de7b72caab6", + "executingLocally": true, + "hubId": "4869d882-e898-40c3-a198-7611b72187a5", + "networkSecurityLevel": "ZWAVE_S2_AUTHENTICATED", + "provisioningState": "NONFUNCTIONAL", + "manufacturerId": 411, + "productType": 4, + "productId": 9219 + }, + "type": "ZWAVE", + "restrictionTier": 0, + "allowed": null, + "executionContext": "LOCAL", + "relationships": [] + } + ], + "_links": {} +} diff --git a/tests/components/smartthings/fixtures/devices/hw_q80r_soundbar.json b/tests/components/smartthings/fixtures/devices/hw_q80r_soundbar.json new file mode 100644 index 00000000000..5f99cefddcb --- /dev/null +++ b/tests/components/smartthings/fixtures/devices/hw_q80r_soundbar.json @@ -0,0 +1,106 @@ +{ + "items": [ + { + "deviceId": "afcf3b91-0000-1111-2222-ddff2a0a6577", + "name": "[AV] Samsung Soundbar Q80R", + "label": "Soundbar", + "manufacturerName": "Samsung Electronics", + "presentationId": "VD-NetworkAudio-001S", + "deviceManufacturerCode": "Samsung Electronics", + "locationId": "c7f8e400-0000-1111-2222-76463f4eb484", + "ownerId": "bd0d9288-0000-1111-2222-68310a42a709", + "roomId": "be09ff51-0000-1111-2222-e48e2dab37fd", + "deviceTypeName": "Samsung OCF Network Audio Player", + "components": [ + { + "id": "main", + "label": "Soundbar", + "capabilities": [ + { + "id": "ocf", + "version": 1 + }, + { + "id": "execute", + "version": 1 + }, + { + "id": "refresh", + "version": 1 + }, + { + "id": "switch", + "version": 1 + }, + { + "id": "audioVolume", + "version": 1 + }, + { + "id": "audioMute", + "version": 1 + }, + { + "id": "audioTrackData", + "version": 1 + }, + { + "id": "mediaInputSource", + "version": 1 + }, 
+ { + "id": "samsungvd.audioInputSource", + "version": 1 + }, + { + "id": "mediaPlayback", + "version": 1 + }, + { + "id": "audioNotification", + "version": 1 + }, + { + "id": "samsungvd.groupInfo", + "version": 1 + } + ], + "categories": [ + { + "name": "NetworkAudio", + "categoryType": "manufacturer" + } + ] + } + ], + "createTime": "2020-10-19T01:35:08Z", + "profile": { + "id": "c1036d88-000-1111-2222-a361463fd53f" + }, + "ocf": { + "ocfDeviceType": "oic.d.networkaudio", + "name": "[AV] Samsung Soundbar Q80R", + "specVersion": "core.1.1.0", + "verticalDomainSpecVersion": "res.1.1.0,sh.1.1.0", + "manufacturerName": "Samsung Electronics", + "modelNumber": "Q80R", + "platformVersion": "Tizen 4.0", + "platformOS": "4.1.10", + "hwVersion": "0-0", + "firmwareVersion": "HW-Q80RWWB-1012.6", + "vendorId": "VD-NetworkAudio-001S", + "vendorResourceClientServerVersion": "1.2", + "locale": "KO", + "lastSignupTime": "2021-01-16T07:05:02.184545Z", + "transferCandidate": false, + "additionalAuthCodeRequired": false + }, + "type": "OCF", + "restrictionTier": 0, + "allowed": null, + "executionContext": "CLOUD", + "relationships": [] + } + ], + "_links": {} +} diff --git a/tests/components/smartthings/fixtures/devices/ikea_kadrilj.json b/tests/components/smartthings/fixtures/devices/ikea_kadrilj.json new file mode 100644 index 00000000000..36f9d40f7e4 --- /dev/null +++ b/tests/components/smartthings/fixtures/devices/ikea_kadrilj.json @@ -0,0 +1,78 @@ +{ + "items": [ + { + "deviceId": "71afed1c-006d-4e48-b16e-e7f88f9fd638", + "name": "window-treatment-battery", + "label": "Kitchen IKEA KADRILJ Window blind", + "manufacturerName": "SmartThingsCommunity", + "presentationId": "fa41d7d3-4c03-327f-b0ce-2edc829f0e34", + "deviceManufacturerCode": "IKEA of Sweden", + "locationId": "5b5f96b5-0286-4f4a-86ef-d5d5c1a78cb8", + "ownerId": "f43fd9e5-2ecd-4aae-aeac-73a8e5cb04da", + "roomId": "89f675a1-1f16-451c-8ab1-a7fdacc5852d", + "components": [ + { + "id": "main", + "label": "main", + "capabilities": [ + { + "id": "windowShade", + "version": 1 + }, + { + "id": "windowShadePreset", + "version": 1 + }, + { + "id": "windowShadeLevel", + "version": 1 + }, + { + "id": "battery", + "version": 1 + }, + { + "id": "firmwareUpdate", + "version": 1 + }, + { + "id": "refresh", + "version": 1 + } + ], + "categories": [ + { + "name": "Blind", + "categoryType": "manufacturer" + } + ] + } + ], + "createTime": "2023-04-26T18:19:06.792Z", + "parentDeviceId": "3ffe04c4-a12c-41f5-b83d-c1b28eca2b5f", + "profile": { + "id": "6d9804bc-9e56-3823-95be-4b315669c481" + }, + "zigbee": { + "eui": "000D6FFFFE2AD0E7", + "networkId": "3009", + "driverId": "46b8bada-1a55-4f84-8915-47ce2cad3621", + "executingLocally": true, + "hubId": "3ffe04c4-a12c-41f5-b83d-c1b28eca2b5f", + "provisioningState": "NONFUNCTIONAL" + }, + "type": "ZIGBEE", + "restrictionTier": 0, + "allowed": null, + "indoorMap": { + "coordinates": [10.0, 36.0, 98.0], + "rotation": [270.0, 0.0, 0.0], + "visible": true, + "data": null + }, + "executionContext": "LOCAL", + "relationships": [] + } + ], + "_links": {} +} diff --git a/tests/components/smartthings/fixtures/devices/tplink_p110.json b/tests/components/smartthings/fixtures/devices/tplink_p110.json new file mode 100644 index 00000000000..ffe7de5ff68 --- /dev/null +++ b/tests/components/smartthings/fixtures/devices/tplink_p110.json @@ -0,0 +1,73 @@ +{ + "items": [ + { + "deviceId": "6602696a-1e48-49e4-919f-69406f5b5da1", + "name": "plug-energy-usage-report", + "label": "Sp\u00fclmaschine", + "manufacturerName": "0AI2", + 
"presentationId": "ST_8f2be0ec-1113-46e0-ad56-3e92eb27410f", + "deviceManufacturerCode": "TP-Link", + "locationId": "70da36b0-bd25-410c-beed-7f0dbf658448", + "ownerId": "be5d4173-dd49-1eee-56f5-f98306ee872c", + "roomId": "bd13616d-b7e2-44ff-914c-eb38ea18c4b4", + "components": [ + { + "id": "main", + "label": "main", + "capabilities": [ + { + "id": "healthCheck", + "version": 1 + }, + { + "id": "refresh", + "version": 1 + }, + { + "id": "switch", + "version": 1 + }, + { + "id": "powerConsumptionReport", + "version": 1 + } + ], + "categories": [ + { + "name": "SmartPlug", + "categoryType": "manufacturer" + }, + { + "name": "SmartPlug", + "categoryType": "user" + } + ] + } + ], + "createTime": "2024-03-07T21:14:59.762Z", + "profile": { + "id": "a25b207e-cbb9-40ae-8a88-906637c22ab6" + }, + "viper": { + "uniqueIdentifier": "8022F7F6FE0A6EACA52B5D89C0D667352136D8C6", + "manufacturerName": "TP-Link", + "modelName": "P110", + "swVersion": "1.3.1 Build 240621 Rel.162048", + "hwVersion": "1.0", + "endpointAppId": "viper_7ea6bb80-b876-11eb-be42-952f31ab3f7b" + }, + "type": "VIPER", + "restrictionTier": 0, + "allowed": null, + "indoorMap": { + "coordinates": [0.0, 0.0, 0.0], + "rotation": [0.0, 180.0, 0.0], + "visible": false, + "data": null + }, + "executionContext": "CLOUD", + "relationships": [] + } + ], + "_links": {} +} diff --git a/tests/components/smartthings/fixtures/devices/vd_sensor_light_2023.json b/tests/components/smartthings/fixtures/devices/vd_sensor_light_2023.json new file mode 100644 index 00000000000..ef1dd2e96bc --- /dev/null +++ b/tests/components/smartthings/fixtures/devices/vd_sensor_light_2023.json @@ -0,0 +1,81 @@ +{ + "items": [ + { + "deviceId": "5cc1c096-98b9-460c-8f1c-1045509ec605", + "name": "VD-Sensor.Light-2023", + "label": "Light Sensor - 55\" The Frame", + "manufacturerName": "Samsung Electronics", + "presentationId": "VD-Sensor.Light-2023", + "deviceManufacturerCode": "Samsung Electronics", + "locationId": "df59873c-4e2c-43ba-bcd4-ade4efb0504a", + "ownerId": "71254e90-c144-45b6-aabe-709f78f48376", + "roomId": "8a4fac38-48d1-4a8c-922b-92620442363b", + "deviceTypeName": "x.com.st.d.sensor.light", + "components": [ + { + "id": "main", + "label": "main", + "capabilities": [ + { + "id": "ocf", + "version": 1 + }, + { + "id": "switch", + "version": 1 + }, + { + "id": "refresh", + "version": 1 + }, + { + "id": "execute", + "version": 1 + }, + { + "id": "relativeBrightness", + "version": 1 + }, + { + "id": "samsungvd.deviceCategory", + "version": 1 + } + ], + "categories": [ + { + "name": "LightSensor", + "categoryType": "manufacturer" + } + ] + } + ], + "createTime": "2024-11-15T22:21:27.908Z", + "parentDeviceId": "425ac77a-f7c9-a62d-ff12-cdad144952e3", + "profile": { + "id": "5f1633fb-0c63-34d3-9d04-a314d393d225" + }, + "ocf": { + "ocfDeviceType": "x.com.st.d.sensor.light", + "name": "Light Sensor - 55 The Frame", + "specVersion": "core.1.1.0", + "verticalDomainSpecVersion": "res.1.1.0,sh.1.1.0", + "manufacturerName": "Samsung Electronics", + "modelNumber": "QE55LS03DAUXXN", + "platformVersion": "8.0", + "platformOS": "Tizen", + "hwVersion": "", + "firmwareVersion": "latest", + "vendorId": "VD-Sensor.Light-2023", + "vendorResourceClientServerVersion": "4.0.26", + "lastSignupTime": "2024-11-15T22:21:27.933740026Z", + "transferCandidate": false, + "additionalAuthCodeRequired": false + }, + "type": "OCF", + "restrictionTier": 0, + "allowed": [], + "executionContext": "CLOUD" + } + ], + "_links": {} +} diff --git a/tests/components/smartthings/fixtures/subscription.json 
b/tests/components/smartthings/fixtures/subscription.json new file mode 100644 index 00000000000..80f37445524 --- /dev/null +++ b/tests/components/smartthings/fixtures/subscription.json @@ -0,0 +1,16 @@ +{ + "subscriptionId": "f5768ce8-c9e5-4507-9020-912c0c60e0ab", + "registrationUrl": "https://spigot-regional.api.smartthings.com/filters/f5768ce8-c9e5-4507-9020-912c0c60e0ab/activate?filterRegion=eu-west-1", + "name": "My Home Assistant sub", + "version": 20250122, + "subscriptionFilters": [ + { + "type": "LOCATIONIDS", + "value": ["88a3a314-f0c8-40b4-bb44-44ba06c9c42e"], + "eventType": ["DEVICE_EVENT"], + "attribute": null, + "capability": null, + "component": null + } + ] +} diff --git a/tests/components/smartthings/snapshots/test_binary_sensor.ambr b/tests/components/smartthings/snapshots/test_binary_sensor.ambr index 27a5e38a123..d6a5ac6a4e7 100644 --- a/tests/components/smartthings/snapshots/test_binary_sensor.ambr +++ b/tests/components/smartthings/snapshots/test_binary_sensor.ambr @@ -29,7 +29,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '10e06a70-ee7d-4832-85e9-a0a06a7a05bd.motion', + 'unique_id': '10e06a70-ee7d-4832-85e9-a0a06a7a05bd_main_motionSensor_motion_motion', 'unit_of_measurement': None, }) # --- @@ -77,7 +77,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '10e06a70-ee7d-4832-85e9-a0a06a7a05bd.sound', + 'unique_id': '10e06a70-ee7d-4832-85e9-a0a06a7a05bd_main_soundSensor_sound_sound', 'unit_of_measurement': None, }) # --- @@ -95,7 +95,7 @@ 'state': 'off', }) # --- -# name: test_all_entities[contact_sensor][binary_sensor.front_door_open_closed_sensor_door-entry] +# name: test_all_entities[contact_sensor][binary_sensor.front_door_open_closed_sensor-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -108,7 +108,577 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': None, - 'entity_id': 'binary_sensor.front_door_open_closed_sensor_door', + 'entity_id': 'binary_sensor.front_door_open_closed_sensor', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '2d9a892b-1c93-45a5-84cb-0e81889498c6_main_contactSensor_contact_contact', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[contact_sensor][binary_sensor.front_door_open_closed_sensor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'garage_door', + 'friendly_name': '.Front Door Open/Closed Sensor', + }), + 'context': , + 'entity_id': 'binary_sensor.front_door_open_closed_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_ks_cooktop_31001][binary_sensor.induction_hob_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.induction_hob_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 
'original_name': 'Power', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '808dbd84-f357-47e2-a0cd-3b66fa22d584_main_switch_switch_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_cooktop_31001][binary_sensor.induction_hob_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Induction Hob Power', + }), + 'context': , + 'entity_id': 'binary_sensor.induction_hob_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_ks_microwave_0101x][binary_sensor.microwave_child_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.microwave_child_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Child lock', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'child_lock', + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_samsungce.kidsLock_lockState_lockState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_microwave_0101x][binary_sensor.microwave_child_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Microwave Child lock', + }), + 'context': , + 'entity_id': 'binary_sensor.microwave_child_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_ks_microwave_0101x][binary_sensor.microwave_door-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.microwave_door', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Door', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'door', + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_samsungce.doorState_doorState_doorState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_microwave_0101x][binary_sensor.microwave_door-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'opening', + 'friendly_name': 'Microwave Door', + }), + 'context': , + 'entity_id': 'binary_sensor.microwave_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_ks_microwave_0101x][binary_sensor.microwave_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.microwave_power', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_switch_switch_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_microwave_0101x][binary_sensor.microwave_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Microwave Power', + }), + 'context': , + 'entity_id': 'binary_sensor.microwave_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_ks_microwave_0101x][binary_sensor.microwave_remote_control-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.microwave_remote_control', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Remote control', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'remote_control', + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_remoteControlStatus_remoteControlEnabled_remoteControlEnabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_microwave_0101x][binary_sensor.microwave_remote_control-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Microwave Remote control', + }), + 'context': , + 'entity_id': 'binary_sensor.microwave_remote_control', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_ks_oven_01061][binary_sensor.oven_child_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.oven_child_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Child lock', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'child_lock', + 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f_main_samsungce.kidsLock_lockState_lockState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_oven_01061][binary_sensor.oven_child_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Oven Child lock', + }), + 'context': , + 'entity_id': 'binary_sensor.oven_child_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_ks_oven_01061][binary_sensor.oven_door-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 
'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.oven_door', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Door', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'door', + 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f_main_samsungce.doorState_doorState_doorState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_oven_01061][binary_sensor.oven_door-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'opening', + 'friendly_name': 'Oven Door', + }), + 'context': , + 'entity_id': 'binary_sensor.oven_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_ks_oven_01061][binary_sensor.oven_remote_control-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.oven_remote_control', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Remote control', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'remote_control', + 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f_main_remoteControlStatus_remoteControlEnabled_remoteControlEnabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_oven_01061][binary_sensor.oven_remote_control-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Oven Remote control', + }), + 'context': , + 'entity_id': 'binary_sensor.oven_remote_control', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[da_ks_range_0101x][binary_sensor.vulcan_child_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.vulcan_child_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Child lock', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'child_lock', + 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18_main_samsungce.kidsLock_lockState_lockState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_range_0101x][binary_sensor.vulcan_child_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Vulcan Child lock', + }), + 'context': , + 'entity_id': 'binary_sensor.vulcan_child_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: 
test_all_entities[da_ks_range_0101x][binary_sensor.vulcan_door-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.vulcan_door', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Door', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'door', + 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18_main_samsungce.doorState_doorState_doorState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_range_0101x][binary_sensor.vulcan_door-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'opening', + 'friendly_name': 'Vulcan Door', + }), + 'context': , + 'entity_id': 'binary_sensor.vulcan_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_ks_range_0101x][binary_sensor.vulcan_remote_control-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.vulcan_remote_control', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Remote control', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'remote_control', + 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18_main_remoteControlStatus_remoteControlEnabled_remoteControlEnabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_range_0101x][binary_sensor.vulcan_remote_control-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Vulcan Remote control', + }), + 'context': , + 'entity_id': 'binary_sensor.vulcan_remote_control', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[da_ref_normal_000001][binary_sensor.refrigerator_cooler_door-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.refrigerator_cooler_door', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -120,23 +690,23 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Door', + 'original_name': 'Cooler door', 'platform': 'smartthings', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, - 'unique_id': '2d9a892b-1c93-45a5-84cb-0e81889498c6.contact', + 'translation_key': 'cooler_door', + 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09_cooler_contactSensor_contact_contact', 'unit_of_measurement': None, }) # --- -# name: test_all_entities[contact_sensor][binary_sensor.front_door_open_closed_sensor_door-state] +# name: 
test_all_entities[da_ref_normal_000001][binary_sensor.refrigerator_cooler_door-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'door', - 'friendly_name': '.Front Door Open/Closed Sensor Door', + 'friendly_name': 'Refrigerator Cooler door', }), 'context': , - 'entity_id': 'binary_sensor.front_door_open_closed_sensor_door', + 'entity_id': 'binary_sensor.refrigerator_cooler_door', 'last_changed': , 'last_reported': , 'last_updated': , @@ -173,7 +743,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09.contact', + 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09_main_contactSensor_contact_contact', 'unit_of_measurement': None, }) # --- @@ -191,6 +761,1000 @@ 'state': 'off', }) # --- +# name: test_all_entities[da_ref_normal_000001][binary_sensor.refrigerator_freezer_door-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.refrigerator_freezer_door', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Freezer door', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'freezer_door', + 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09_freezer_contactSensor_contact_contact', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ref_normal_000001][binary_sensor.refrigerator_freezer_door-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Refrigerator Freezer door', + }), + 'context': , + 'entity_id': 'binary_sensor.refrigerator_freezer_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_dw_000001][binary_sensor.dishwasher_child_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.dishwasher_child_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Child lock', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'child_lock', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_samsungce.kidsLock_lockState_lockState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_dw_000001][binary_sensor.dishwasher_child_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dishwasher Child lock', + }), + 'context': , + 'entity_id': 'binary_sensor.dishwasher_child_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_dw_000001][binary_sensor.dishwasher_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 
'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.dishwasher_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_switch_switch_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_dw_000001][binary_sensor.dishwasher_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Dishwasher Power', + }), + 'context': , + 'entity_id': 'binary_sensor.dishwasher_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_dw_000001][binary_sensor.dishwasher_remote_control-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.dishwasher_remote_control', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Remote control', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'remote_control', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_remoteControlStatus_remoteControlEnabled_remoteControlEnabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_dw_000001][binary_sensor.dishwasher_remote_control-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dishwasher Remote control', + }), + 'context': , + 'entity_id': 'binary_sensor.dishwasher_remote_control', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_sc_000001][binary_sensor.airdresser_child_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.airdresser_child_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Child lock', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'child_lock', + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_samsungce.kidsLock_lockState_lockState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_sc_000001][binary_sensor.airdresser_child_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'AirDresser Child lock', + }), + 'context': , + 'entity_id': 'binary_sensor.airdresser_child_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) 
+# --- +# name: test_all_entities[da_wm_sc_000001][binary_sensor.airdresser_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.airdresser_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_switch_switch_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_sc_000001][binary_sensor.airdresser_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'AirDresser Power', + }), + 'context': , + 'entity_id': 'binary_sensor.airdresser_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_sc_000001][binary_sensor.airdresser_remote_control-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.airdresser_remote_control', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Remote control', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'remote_control', + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_remoteControlStatus_remoteControlEnabled_remoteControlEnabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_sc_000001][binary_sensor.airdresser_remote_control-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'AirDresser Remote control', + }), + 'context': , + 'entity_id': 'binary_sensor.airdresser_remote_control', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_wd_000001][binary_sensor.dryer_child_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.dryer_child_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Child lock', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'child_lock', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_samsungce.kidsLock_lockState_lockState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wd_000001][binary_sensor.dryer_child_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'friendly_name': 'Dryer Child lock', + }), + 'context': , + 'entity_id': 'binary_sensor.dryer_child_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_wd_000001][binary_sensor.dryer_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.dryer_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_switch_switch_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wd_000001][binary_sensor.dryer_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Dryer Power', + }), + 'context': , + 'entity_id': 'binary_sensor.dryer_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_wd_000001][binary_sensor.dryer_remote_control-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.dryer_remote_control', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Remote control', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'remote_control', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_remoteControlStatus_remoteControlEnabled_remoteControlEnabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wd_000001][binary_sensor.dryer_remote_control-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dryer Remote control', + }), + 'context': , + 'entity_id': 'binary_sensor.dryer_remote_control', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_wd_000001][binary_sensor.dryer_wrinkle_prevent_active-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.dryer_wrinkle_prevent_active', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Wrinkle prevent active', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dryer_wrinkle_prevent_active', + 'unique_id': 
'02f7256e-8353-5bdd-547f-bd5b1647e01b_main_custom.dryerWrinklePrevent_operatingState_operatingState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wd_000001][binary_sensor.dryer_wrinkle_prevent_active-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dryer Wrinkle prevent active', + }), + 'context': , + 'entity_id': 'binary_sensor.dryer_wrinkle_prevent_active', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][binary_sensor.seca_roupa_child_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.seca_roupa_child_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Child lock', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'child_lock', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_samsungce.kidsLock_lockState_lockState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][binary_sensor.seca_roupa_child_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Seca-Roupa Child lock', + }), + 'context': , + 'entity_id': 'binary_sensor.seca_roupa_child_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][binary_sensor.seca_roupa_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.seca_roupa_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_switch_switch_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][binary_sensor.seca_roupa_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Seca-Roupa Power', + }), + 'context': , + 'entity_id': 'binary_sensor.seca_roupa_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][binary_sensor.seca_roupa_remote_control-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.seca_roupa_remote_control', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': None, + 'original_name': 'Remote control', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'remote_control', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_remoteControlStatus_remoteControlEnabled_remoteControlEnabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][binary_sensor.seca_roupa_remote_control-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Seca-Roupa Remote control', + }), + 'context': , + 'entity_id': 'binary_sensor.seca_roupa_remote_control', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][binary_sensor.seca_roupa_wrinkle_prevent_active-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.seca_roupa_wrinkle_prevent_active', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Wrinkle prevent active', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dryer_wrinkle_prevent_active', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_custom.dryerWrinklePrevent_operatingState_operatingState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][binary_sensor.seca_roupa_wrinkle_prevent_active-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Seca-Roupa Wrinkle prevent active', + }), + 'context': , + 'entity_id': 'binary_sensor.seca_roupa_wrinkle_prevent_active', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_wm_000001][binary_sensor.washer_child_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.washer_child_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Child lock', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'child_lock', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_samsungce.kidsLock_lockState_lockState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wm_000001][binary_sensor.washer_child_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Washer Child lock', + }), + 'context': , + 'entity_id': 'binary_sensor.washer_child_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_wm_000001][binary_sensor.washer_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 
'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.washer_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_switch_switch_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wm_000001][binary_sensor.washer_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Washer Power', + }), + 'context': , + 'entity_id': 'binary_sensor.washer_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_wm_000001][binary_sensor.washer_remote_control-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.washer_remote_control', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Remote control', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'remote_control', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_remoteControlStatus_remoteControlEnabled_remoteControlEnabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wm_000001][binary_sensor.washer_remote_control-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Washer Remote control', + }), + 'context': , + 'entity_id': 'binary_sensor.washer_remote_control', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_wm_000001_1][binary_sensor.washing_machine_child_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.washing_machine_child_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Child lock', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'child_lock', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_samsungce.kidsLock_lockState_lockState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wm_000001_1][binary_sensor.washing_machine_child_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Washing Machine Child lock', + }), + 'context': , + 'entity_id': 'binary_sensor.washing_machine_child_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- 
+# name: test_all_entities[da_wm_wm_000001_1][binary_sensor.washing_machine_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.washing_machine_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_switch_switch_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wm_000001_1][binary_sensor.washing_machine_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Washing Machine Power', + }), + 'context': , + 'entity_id': 'binary_sensor.washing_machine_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[da_wm_wm_000001_1][binary_sensor.washing_machine_remote_control-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.washing_machine_remote_control', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Remote control', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'remote_control', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_remoteControlStatus_remoteControlEnabled_remoteControlEnabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wm_000001_1][binary_sensor.washing_machine_remote_control-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Washing Machine Remote control', + }), + 'context': , + 'entity_id': 'binary_sensor.washing_machine_remote_control', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- # name: test_all_entities[ecobee_sensor][binary_sensor.child_bedroom_motion-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -221,7 +1785,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'd5dc3299-c266-41c7-bd08-f540aea54b89.motion', + 'unique_id': 'd5dc3299-c266-41c7-bd08-f540aea54b89_main_motionSensor_motion_motion', 'unit_of_measurement': None, }) # --- @@ -269,7 +1833,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'd5dc3299-c266-41c7-bd08-f540aea54b89.presence', + 'unique_id': 'd5dc3299-c266-41c7-bd08-f540aea54b89_main_presenceSensor_presence_presence', 'unit_of_measurement': None, }) # --- @@ -317,7 +1881,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '184c67cc-69e2-44b6-8f73-55c963068ad9.presence', + 'unique_id': '184c67cc-69e2-44b6-8f73-55c963068ad9_main_presenceSensor_presence_presence', 
'unit_of_measurement': None, }) # --- @@ -335,6 +1899,54 @@ 'state': 'on', }) # --- +# name: test_all_entities[multipurpose_sensor][binary_sensor.deck_door-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.deck_door', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c_main_contactSensor_contact_contact', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[multipurpose_sensor][binary_sensor.deck_door-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Deck Door', + }), + 'context': , + 'entity_id': 'binary_sensor.deck_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_all_entities[multipurpose_sensor][binary_sensor.deck_door_acceleration-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -365,7 +1977,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'acceleration', - 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c.acceleration', + 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c_main_accelerationSensor_acceleration_acceleration', 'unit_of_measurement': None, }) # --- @@ -383,54 +1995,6 @@ 'state': 'off', }) # --- -# name: test_all_entities[multipurpose_sensor][binary_sensor.deck_door_door-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'config_subentry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.deck_door_door', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Door', - 'platform': 'smartthings', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c.contact', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[multipurpose_sensor][binary_sensor.deck_door_door-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'door', - 'friendly_name': 'Deck Door Door', - }), - 'context': , - 'entity_id': 'binary_sensor.deck_door_door', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_all_entities[virtual_valve][binary_sensor.volvo_valve-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -461,7 +2025,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'valve', - 'unique_id': '612ab3c2-3bb0-48f7-b2c0-15b169cb2fc3.valve', + 'unique_id': '612ab3c2-3bb0-48f7-b2c0-15b169cb2fc3_main_valve_valve_valve', 'unit_of_measurement': None, }) # --- @@ -509,7 +2073,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'a2a6018b-2663-4727-9d1d-8f56953b5116.water', + 'unique_id': 
'a2a6018b-2663-4727-9d1d-8f56953b5116_main_waterSensor_water_water', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/smartthings/snapshots/test_button.ambr b/tests/components/smartthings/snapshots/test_button.ambr new file mode 100644 index 00000000000..2c9dbd008af --- /dev/null +++ b/tests/components/smartthings/snapshots/test_button.ambr @@ -0,0 +1,189 @@ +# serializer version: 1 +# name: test_all_entities[da_ks_microwave_0101x][button.microwave_stop-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.microwave_stop', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Stop', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'stop', + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenOperatingState_stop', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_microwave_0101x][button.microwave_stop-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Microwave Stop', + }), + 'context': , + 'entity_id': 'button.microwave_stop', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_entities[da_ks_oven_01061][button.oven_stop-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.oven_stop', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Stop', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'stop', + 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f_main_ovenOperatingState_stop', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_oven_01061][button.oven_stop-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Oven Stop', + }), + 'context': , + 'entity_id': 'button.oven_stop', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_entities[da_ks_range_0101x][button.vulcan_stop-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.vulcan_stop', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Stop', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'stop', + 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18_main_ovenOperatingState_stop', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_all_entities[da_ks_range_0101x][button.vulcan_stop-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Vulcan Stop', + }), + 'context': , + 'entity_id': 'button.vulcan_stop', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_entities[da_ref_normal_000001][button.refrigerator_reset_water_filter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.refrigerator_reset_water_filter', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reset water filter', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reset_water_filter', + 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09_main_custom.waterFilter_resetWaterFilter', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ref_normal_000001][button.refrigerator_reset_water_filter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Refrigerator Reset water filter', + }), + 'context': , + 'entity_id': 'button.refrigerator_reset_water_filter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/smartthings/snapshots/test_climate.ambr b/tests/components/smartthings/snapshots/test_climate.ambr index 6b512f93d39..10e9dbd5489 100644 --- a/tests/components/smartthings/snapshots/test_climate.ambr +++ b/tests/components/smartthings/snapshots/test_climate.ambr @@ -1,4 +1,68 @@ # serializer version: 1 +# name: test_all_entities[aux_ac][climate.aux_a_c_on_off-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'fan_modes': None, + 'hvac_modes': list([ + , + ]), + 'max_temp': 35, + 'min_temp': 7, + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.aux_a_c_on_off', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'bf53a150-f8a4-45d1-aac4-86252475d551_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[aux_ac][climate.aux_a_c_on_off-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 20.0, + 'fan_mode': 'auto', + 'fan_modes': None, + 'friendly_name': 'AUX A/C on-off', + 'hvac_modes': list([ + , + ]), + 'max_temp': 35, + 'min_temp': 7, + 'supported_features': , + 'temperature': 20.0, + }), + 'context': , + 'entity_id': 'climate.aux_a_c_on_off', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_all_entities[bosch_radiator_thermostat_ii][climate.radiator_thermostat_ii_m_wohnzimmer-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -35,7 +99,7 @@ 'previous_unique_id': None, 'supported_features': , 
'translation_key': None, - 'unique_id': '286ba274-4093-4bcb-849c-a1a3efe7b1e5', + 'unique_id': '286ba274-4093-4bcb-849c-a1a3efe7b1e5_main', 'unit_of_measurement': None, }) # --- @@ -114,7 +178,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d', + 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d_main', 'unit_of_measurement': None, }) # --- @@ -219,7 +283,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e', + 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e_main', 'unit_of_measurement': None, }) # --- @@ -319,7 +383,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': 'F8042E25-0E53-0000-0000-000000000000', + 'unique_id': 'F8042E25-0E53-0000-0000-000000000000_main', 'unit_of_measurement': None, }) # --- @@ -397,7 +461,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '028469cb-6e89-4f14-8d9a-bfbca5e0fbfc', + 'unique_id': '028469cb-6e89-4f14-8d9a-bfbca5e0fbfc_main', 'unit_of_measurement': None, }) # --- @@ -432,6 +496,70 @@ 'state': 'heat', }) # --- +# name: test_all_entities[ecobee_thermostat_offline][climate.downstairs-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'fan_modes': None, + 'hvac_modes': list([ + ]), + 'max_temp': 35, + 'min_temp': 7, + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.downstairs', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '1888b38f-6246-4f1e-911b-bfcfb66999db_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[ecobee_thermostat_offline][climate.downstairs-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': None, + 'fan_mode': None, + 'fan_modes': None, + 'friendly_name': 'Downstairs', + 'hvac_modes': list([ + ]), + 'max_temp': 35, + 'min_temp': 7, + 'supported_features': , + 'target_temp_high': None, + 'target_temp_low': None, + 'temperature': None, + }), + 'context': , + 'entity_id': 'climate.downstairs', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- # name: test_all_entities[generic_ef00_v1][climate.thermostat_kuche-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -467,7 +595,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '656569c2-7976-4232-a789-34b4d1176c3a', + 'unique_id': '656569c2-7976-4232-a789-34b4d1176c3a_main', 'unit_of_measurement': None, }) # --- @@ -529,7 +657,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '69a271f6-6537-4982-8cd9-979866872692', + 'unique_id': '69a271f6-6537-4982-8cd9-979866872692_main', 'unit_of_measurement': None, }) # --- @@ -595,7 +723,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '2894dc93-0f11-49cc-8a81-3a684cebebf6', + 'unique_id': '2894dc93-0f11-49cc-8a81-3a684cebebf6_main', 'unit_of_measurement': None, 
}) # --- diff --git a/tests/components/smartthings/snapshots/test_cover.ambr b/tests/components/smartthings/snapshots/test_cover.ambr index aa928c09b7a..4b5cf705665 100644 --- a/tests/components/smartthings/snapshots/test_cover.ambr +++ b/tests/components/smartthings/snapshots/test_cover.ambr @@ -29,7 +29,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '571af102-15db-4030-b76b-245a691f74a5', + 'unique_id': '571af102-15db-4030-b76b-245a691f74a5_main', 'unit_of_measurement': None, }) # --- @@ -49,3 +49,54 @@ 'state': 'open', }) # --- +# name: test_all_entities[ikea_kadrilj][cover.kitchen_ikea_kadrilj_window_blind-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.kitchen_ikea_kadrilj_window_blind', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '71afed1c-006d-4e48-b16e-e7f88f9fd638_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[ikea_kadrilj][cover.kitchen_ikea_kadrilj_window_blind-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'battery_level': 37, + 'current_position': 32, + 'device_class': 'shade', + 'friendly_name': 'Kitchen IKEA KADRILJ Window blind', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.kitchen_ikea_kadrilj_window_blind', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- diff --git a/tests/components/smartthings/snapshots/test_diagnostics.ambr b/tests/components/smartthings/snapshots/test_diagnostics.ambr index 7610c8839ba..b9847bf9746 100644 --- a/tests/components/smartthings/snapshots/test_diagnostics.ambr +++ b/tests/components/smartthings/snapshots/test_diagnostics.ambr @@ -1,307 +1,311 @@ # serializer version: 1 # name: test_config_entry_diagnostics[da_ac_rac_000001] dict({ - '_links': dict({ - }), - 'items': list([ + 'devices': list([ dict({ - 'allowed': list([ - ]), - 'components': list([ + '_links': dict({ + }), + 'items': list([ dict({ - 'capabilities': list([ + 'allowed': list([ + ]), + 'components': list([ dict({ - 'id': 'ocf', - 'version': 1, + 'capabilities': list([ + dict({ + 'id': 'ocf', + 'version': 1, + }), + dict({ + 'id': 'switch', + 'version': 1, + }), + dict({ + 'id': 'airConditionerMode', + 'version': 1, + }), + dict({ + 'id': 'airConditionerFanMode', + 'version': 1, + }), + dict({ + 'id': 'fanOscillationMode', + 'version': 1, + }), + dict({ + 'id': 'airQualitySensor', + 'version': 1, + }), + dict({ + 'id': 'temperatureMeasurement', + 'version': 1, + }), + dict({ + 'id': 'thermostatCoolingSetpoint', + 'version': 1, + }), + dict({ + 'id': 'relativeHumidityMeasurement', + 'version': 1, + }), + dict({ + 'id': 'dustSensor', + 'version': 1, + }), + dict({ + 'id': 'veryFineDustSensor', + 'version': 1, + }), + dict({ + 'id': 'audioVolume', + 'version': 1, + }), + dict({ + 'id': 'remoteControlStatus', + 'version': 1, + }), + dict({ + 'id': 'powerConsumptionReport', + 'version': 1, + }), + dict({ + 'id': 'demandResponseLoadControl', + 'version': 1, + }), + dict({ + 'id': 'refresh', + 
'version': 1, + }), + dict({ + 'id': 'execute', + 'version': 1, + }), + dict({ + 'id': 'custom.spiMode', + 'version': 1, + }), + dict({ + 'id': 'custom.thermostatSetpointControl', + 'version': 1, + }), + dict({ + 'id': 'custom.airConditionerOptionalMode', + 'version': 1, + }), + dict({ + 'id': 'custom.airConditionerTropicalNightMode', + 'version': 1, + }), + dict({ + 'id': 'custom.autoCleaningMode', + 'version': 1, + }), + dict({ + 'id': 'custom.deviceReportStateConfiguration', + 'version': 1, + }), + dict({ + 'id': 'custom.energyType', + 'version': 1, + }), + dict({ + 'id': 'custom.dustFilter', + 'version': 1, + }), + dict({ + 'id': 'custom.airConditionerOdorController', + 'version': 1, + }), + dict({ + 'id': 'custom.deodorFilter', + 'version': 1, + }), + dict({ + 'id': 'custom.disabledComponents', + 'version': 1, + }), + dict({ + 'id': 'custom.disabledCapabilities', + 'version': 1, + }), + dict({ + 'id': 'samsungce.deviceIdentification', + 'version': 1, + }), + dict({ + 'id': 'samsungce.dongleSoftwareInstallation', + 'version': 1, + }), + dict({ + 'id': 'samsungce.softwareUpdate', + 'version': 1, + }), + dict({ + 'id': 'samsungce.selfCheck', + 'version': 1, + }), + dict({ + 'id': 'samsungce.driverVersion', + 'version': 1, + }), + ]), + 'categories': list([ + dict({ + 'categoryType': 'manufacturer', + 'name': 'AirConditioner', + }), + ]), + 'id': 'main', + 'label': 'main', }), dict({ - 'id': 'switch', - 'version': 1, - }), - dict({ - 'id': 'airConditionerMode', - 'version': 1, - }), - dict({ - 'id': 'airConditionerFanMode', - 'version': 1, - }), - dict({ - 'id': 'fanOscillationMode', - 'version': 1, - }), - dict({ - 'id': 'airQualitySensor', - 'version': 1, - }), - dict({ - 'id': 'temperatureMeasurement', - 'version': 1, - }), - dict({ - 'id': 'thermostatCoolingSetpoint', - 'version': 1, - }), - dict({ - 'id': 'relativeHumidityMeasurement', - 'version': 1, - }), - dict({ - 'id': 'dustSensor', - 'version': 1, - }), - dict({ - 'id': 'veryFineDustSensor', - 'version': 1, - }), - dict({ - 'id': 'audioVolume', - 'version': 1, - }), - dict({ - 'id': 'remoteControlStatus', - 'version': 1, - }), - dict({ - 'id': 'powerConsumptionReport', - 'version': 1, - }), - dict({ - 'id': 'demandResponseLoadControl', - 'version': 1, - }), - dict({ - 'id': 'refresh', - 'version': 1, - }), - dict({ - 'id': 'execute', - 'version': 1, - }), - dict({ - 'id': 'custom.spiMode', - 'version': 1, - }), - dict({ - 'id': 'custom.thermostatSetpointControl', - 'version': 1, - }), - dict({ - 'id': 'custom.airConditionerOptionalMode', - 'version': 1, - }), - dict({ - 'id': 'custom.airConditionerTropicalNightMode', - 'version': 1, - }), - dict({ - 'id': 'custom.autoCleaningMode', - 'version': 1, - }), - dict({ - 'id': 'custom.deviceReportStateConfiguration', - 'version': 1, - }), - dict({ - 'id': 'custom.energyType', - 'version': 1, - }), - dict({ - 'id': 'custom.dustFilter', - 'version': 1, - }), - dict({ - 'id': 'custom.airConditionerOdorController', - 'version': 1, - }), - dict({ - 'id': 'custom.deodorFilter', - 'version': 1, - }), - dict({ - 'id': 'custom.disabledComponents', - 'version': 1, - }), - dict({ - 'id': 'custom.disabledCapabilities', - 'version': 1, - }), - dict({ - 'id': 'samsungce.deviceIdentification', - 'version': 1, - }), - dict({ - 'id': 'samsungce.dongleSoftwareInstallation', - 'version': 1, - }), - dict({ - 'id': 'samsungce.softwareUpdate', - 'version': 1, - }), - dict({ - 'id': 'samsungce.selfCheck', - 'version': 1, - }), - dict({ - 'id': 'samsungce.driverVersion', - 'version': 1, + 'capabilities': 
list([ + dict({ + 'id': 'switch', + 'version': 1, + }), + dict({ + 'id': 'airConditionerMode', + 'version': 1, + }), + dict({ + 'id': 'airConditionerFanMode', + 'version': 1, + }), + dict({ + 'id': 'fanOscillationMode', + 'version': 1, + }), + dict({ + 'id': 'temperatureMeasurement', + 'version': 1, + }), + dict({ + 'id': 'thermostatCoolingSetpoint', + 'version': 1, + }), + dict({ + 'id': 'relativeHumidityMeasurement', + 'version': 1, + }), + dict({ + 'id': 'airQualitySensor', + 'version': 1, + }), + dict({ + 'id': 'dustSensor', + 'version': 1, + }), + dict({ + 'id': 'veryFineDustSensor', + 'version': 1, + }), + dict({ + 'id': 'odorSensor', + 'version': 1, + }), + dict({ + 'id': 'remoteControlStatus', + 'version': 1, + }), + dict({ + 'id': 'audioVolume', + 'version': 1, + }), + dict({ + 'id': 'custom.thermostatSetpointControl', + 'version': 1, + }), + dict({ + 'id': 'custom.autoCleaningMode', + 'version': 1, + }), + dict({ + 'id': 'custom.airConditionerTropicalNightMode', + 'version': 1, + }), + dict({ + 'id': 'custom.disabledCapabilities', + 'version': 1, + }), + dict({ + 'id': 'ocf', + 'version': 1, + }), + dict({ + 'id': 'powerConsumptionReport', + 'version': 1, + }), + dict({ + 'id': 'demandResponseLoadControl', + 'version': 1, + }), + dict({ + 'id': 'custom.spiMode', + 'version': 1, + }), + dict({ + 'id': 'custom.airConditionerOptionalMode', + 'version': 1, + }), + dict({ + 'id': 'custom.deviceReportStateConfiguration', + 'version': 1, + }), + dict({ + 'id': 'custom.energyType', + 'version': 1, + }), + dict({ + 'id': 'custom.dustFilter', + 'version': 1, + }), + dict({ + 'id': 'custom.airConditionerOdorController', + 'version': 1, + }), + dict({ + 'id': 'custom.deodorFilter', + 'version': 1, + }), + ]), + 'categories': list([ + dict({ + 'categoryType': 'manufacturer', + 'name': 'Other', + }), + ]), + 'id': '1', + 'label': '1', }), ]), - 'categories': list([ - dict({ - 'categoryType': 'manufacturer', - 'name': 'AirConditioner', - }), - ]), - 'id': 'main', - 'label': 'main', - }), - dict({ - 'capabilities': list([ - dict({ - 'id': 'switch', - 'version': 1, - }), - dict({ - 'id': 'airConditionerMode', - 'version': 1, - }), - dict({ - 'id': 'airConditionerFanMode', - 'version': 1, - }), - dict({ - 'id': 'fanOscillationMode', - 'version': 1, - }), - dict({ - 'id': 'temperatureMeasurement', - 'version': 1, - }), - dict({ - 'id': 'thermostatCoolingSetpoint', - 'version': 1, - }), - dict({ - 'id': 'relativeHumidityMeasurement', - 'version': 1, - }), - dict({ - 'id': 'airQualitySensor', - 'version': 1, - }), - dict({ - 'id': 'dustSensor', - 'version': 1, - }), - dict({ - 'id': 'veryFineDustSensor', - 'version': 1, - }), - dict({ - 'id': 'odorSensor', - 'version': 1, - }), - dict({ - 'id': 'remoteControlStatus', - 'version': 1, - }), - dict({ - 'id': 'audioVolume', - 'version': 1, - }), - dict({ - 'id': 'custom.thermostatSetpointControl', - 'version': 1, - }), - dict({ - 'id': 'custom.autoCleaningMode', - 'version': 1, - }), - dict({ - 'id': 'custom.airConditionerTropicalNightMode', - 'version': 1, - }), - dict({ - 'id': 'custom.disabledCapabilities', - 'version': 1, - }), - dict({ - 'id': 'ocf', - 'version': 1, - }), - dict({ - 'id': 'powerConsumptionReport', - 'version': 1, - }), - dict({ - 'id': 'demandResponseLoadControl', - 'version': 1, - }), - dict({ - 'id': 'custom.spiMode', - 'version': 1, - }), - dict({ - 'id': 'custom.airConditionerOptionalMode', - 'version': 1, - }), - dict({ - 'id': 'custom.deviceReportStateConfiguration', - 'version': 1, - }), - dict({ - 'id': 'custom.energyType', 
- 'version': 1, - }), - dict({ - 'id': 'custom.dustFilter', - 'version': 1, - }), - dict({ - 'id': 'custom.airConditionerOdorController', - 'version': 1, - }), - dict({ - 'id': 'custom.deodorFilter', - 'version': 1, - }), - ]), - 'categories': list([ - dict({ - 'categoryType': 'manufacturer', - 'name': 'Other', - }), - ]), - 'id': '1', - 'label': '1', + 'createTime': '2021-04-06T16:43:34.753Z', + 'deviceId': '96a5ef74-5832-a84b-f1f7-ca799957065d', + 'deviceManufacturerCode': 'Samsung Electronics', + 'deviceTypeName': 'Samsung OCF Air Conditioner', + 'executionContext': 'CLOUD', + 'label': 'AC Office Granit', + 'locationId': '58d3fd7c-c512-4da3-b500-ef269382756c', + 'manufacturerName': 'Samsung Electronics', + 'name': '[room a/c] Samsung', + 'ocf': dict({ + 'additionalAuthCodeRequired': False, + 'lastSignupTime': '2025-01-08T02:32:04.631093137Z', + 'manufacturerName': 'Samsung Electronics', + 'ocfDeviceType': 'x.com.st.d.sensor.light', + 'transferCandidate': False, + 'vendorId': 'VD-Sensor.Light-2023', + }), + 'ownerId': 'f9a28d7c-1ed5-d9e9-a81c-18971ec081db', + 'presentationId': 'DA-AC-RAC-000001', + 'profile': dict({ + 'id': '60fbc713-8da5-315d-b31a-6d6dcde4be7b', + }), + 'restrictionTier': 0, + 'roomId': '7715151d-0314-457a-a82c-5ce48900e065', + 'type': 'OCF', }), ]), - 'createTime': '2021-04-06T16:43:34.753Z', - 'deviceId': '96a5ef74-5832-a84b-f1f7-ca799957065d', - 'deviceManufacturerCode': 'Samsung Electronics', - 'deviceTypeName': 'Samsung OCF Air Conditioner', - 'executionContext': 'CLOUD', - 'label': 'AC Office Granit', - 'locationId': '58d3fd7c-c512-4da3-b500-ef269382756c', - 'manufacturerName': 'Samsung Electronics', - 'name': '[room a/c] Samsung', - 'ocf': dict({ - 'additionalAuthCodeRequired': False, - 'lastSignupTime': '2025-01-08T02:32:04.631093137Z', - 'manufacturerName': 'Samsung Electronics', - 'ocfDeviceType': 'x.com.st.d.sensor.light', - 'transferCandidate': False, - 'vendorId': 'VD-Sensor.Light-2023', - }), - 'ownerId': 'f9a28d7c-1ed5-d9e9-a81c-18971ec081db', - 'presentationId': 'DA-AC-RAC-000001', - 'profile': dict({ - 'id': '60fbc713-8da5-315d-b31a-6d6dcde4be7b', - }), - 'restrictionTier': 0, - 'roomId': '7715151d-0314-457a-a82c-5ce48900e065', - 'type': 'OCF', }), ]), }) diff --git a/tests/components/smartthings/snapshots/test_event.ambr b/tests/components/smartthings/snapshots/test_event.ambr new file mode 100644 index 00000000000..79c57df5fd7 --- /dev/null +++ b/tests/components/smartthings/snapshots/test_event.ambr @@ -0,0 +1,361 @@ +# serializer version: 1 +# name: test_all_entities[heatit_zpushwall][event.livingroom_smart_switch_button1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'pushed', + 'held', + 'down_hold', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.livingroom_smart_switch_button1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'button1', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'button', + 'unique_id': '5e5b97f3-3094-44e6-abc0-f61283412d6a_button1_button', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[heatit_zpushwall][event.livingroom_smart_switch_button1-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'button', + 'event_type': None, + 'event_types': list([ + 'pushed', + 'held', + 'down_hold', + ]), + 'friendly_name': 'Livingroom smart switch button1', + }), + 'context': , + 'entity_id': 'event.livingroom_smart_switch_button1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_entities[heatit_zpushwall][event.livingroom_smart_switch_button2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'pushed', + 'held', + 'down_hold', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.livingroom_smart_switch_button2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'button2', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'button', + 'unique_id': '5e5b97f3-3094-44e6-abc0-f61283412d6a_button2_button', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[heatit_zpushwall][event.livingroom_smart_switch_button2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'button', + 'event_type': None, + 'event_types': list([ + 'pushed', + 'held', + 'down_hold', + ]), + 'friendly_name': 'Livingroom smart switch button2', + }), + 'context': , + 'entity_id': 'event.livingroom_smart_switch_button2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_entities[heatit_zpushwall][event.livingroom_smart_switch_button3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'pushed', + 'held', + 'down_hold', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.livingroom_smart_switch_button3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'button3', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'button', + 'unique_id': '5e5b97f3-3094-44e6-abc0-f61283412d6a_button3_button', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[heatit_zpushwall][event.livingroom_smart_switch_button3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'button', + 'event_type': None, + 'event_types': list([ + 'pushed', + 'held', + 'down_hold', + ]), + 'friendly_name': 'Livingroom smart switch button3', + }), + 'context': , + 'entity_id': 'event.livingroom_smart_switch_button3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_entities[heatit_zpushwall][event.livingroom_smart_switch_button4-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'pushed', + 'held', + 'down_hold', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': 
None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.livingroom_smart_switch_button4', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'button4', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'button', + 'unique_id': '5e5b97f3-3094-44e6-abc0-f61283412d6a_button4_button', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[heatit_zpushwall][event.livingroom_smart_switch_button4-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'button', + 'event_type': None, + 'event_types': list([ + 'pushed', + 'held', + 'down_hold', + ]), + 'friendly_name': 'Livingroom smart switch button4', + }), + 'context': , + 'entity_id': 'event.livingroom_smart_switch_button4', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_entities[heatit_zpushwall][event.livingroom_smart_switch_button5-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'pushed', + 'held', + 'down_hold', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.livingroom_smart_switch_button5', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'button5', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'button', + 'unique_id': '5e5b97f3-3094-44e6-abc0-f61283412d6a_button5_button', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[heatit_zpushwall][event.livingroom_smart_switch_button5-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'button', + 'event_type': None, + 'event_types': list([ + 'pushed', + 'held', + 'down_hold', + ]), + 'friendly_name': 'Livingroom smart switch button5', + }), + 'context': , + 'entity_id': 'event.livingroom_smart_switch_button5', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_entities[heatit_zpushwall][event.livingroom_smart_switch_button6-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'pushed', + 'held', + 'down_hold', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.livingroom_smart_switch_button6', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'button6', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'button', + 'unique_id': '5e5b97f3-3094-44e6-abc0-f61283412d6a_button6_button', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[heatit_zpushwall][event.livingroom_smart_switch_button6-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'device_class': 'button', + 'event_type': None, + 'event_types': list([ + 'pushed', + 'held', + 'down_hold', + ]), + 'friendly_name': 'Livingroom smart switch button6', + }), + 'context': , + 'entity_id': 'event.livingroom_smart_switch_button6', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/smartthings/snapshots/test_fan.ambr b/tests/components/smartthings/snapshots/test_fan.ambr index 40ab7b12267..1196118b3b5 100644 --- a/tests/components/smartthings/snapshots/test_fan.ambr +++ b/tests/components/smartthings/snapshots/test_fan.ambr @@ -37,7 +37,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': 'f1af21a2-d5a1-437c-b10a-b34a87394b71', + 'unique_id': 'f1af21a2-d5a1-437c-b10a-b34a87394b71_main', 'unit_of_measurement': None, }) # --- @@ -97,7 +97,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '6d95a8b7-4ee3-429a-a13a-00ec9354170c', + 'unique_id': '6d95a8b7-4ee3-429a-a13a-00ec9354170c_main', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/smartthings/snapshots/test_init.ambr b/tests/components/smartthings/snapshots/test_init.ambr index 7c2589590c5..6a402182b82 100644 --- a/tests/components/smartthings/snapshots/test_init.ambr +++ b/tests/components/smartthings/snapshots/test_init.ambr @@ -2,6 +2,39 @@ # name: test_button_event[button] # --- +# name: test_devices[abl_light_b_001] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://account.smartthings.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'smartthings', + '7c16163e-c94e-482f-95f6-139ae0cd9d5e', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': None, + 'model': None, + 'model_id': None, + 'name': 'Kitchen Light 5', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': None, + }) +# --- # name: test_devices[aeotec_home_energy_meter_gen5] DeviceRegistryEntrySnapshot({ 'area_id': 'toilet', @@ -35,6 +68,39 @@ 'via_device_id': None, }) # --- +# name: test_devices[aux_ac] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://account.smartthings.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'smartthings', + 'bf53a150-f8a4-45d1-aac4-86252475d551', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': None, + 'model': None, + 'model_id': None, + 'name': 'AUX A/C on-off', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': None, + }) +# --- # name: test_devices[base_electric_meter] DeviceRegistryEntrySnapshot({ 'area_id': 'theater', @@ -233,6 +299,39 @@ 'via_device_id': None, }) # --- +# name: test_devices[da_ac_airsensor_01001] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://account.smartthings.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': 'Realtek', + 'id': , + 'identifiers': set({ + tuple( + 'smartthings', 
+ 'a3a970ea-e09c-9c04-161b-94c934e21666', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Samsung Electronics', + 'model': 'ASM-KR-TP1-22-ACMB1M', + 'model_id': None, + 'name': '에어모니터 플러스', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': 'ASM-KR-TP1-22-ACMB1M_16240426', + 'via_device_id': None, + }) +# --- # name: test_devices[da_ac_rac_000001] DeviceRegistryEntrySnapshot({ 'area_id': 'theater', @@ -332,6 +431,39 @@ 'via_device_id': None, }) # --- +# name: test_devices[da_ks_cooktop_31001] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://account.smartthings.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'smartthings', + '808dbd84-f357-47e2-a0cd-3b66fa22d584', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': None, + 'model': None, + 'model_id': None, + 'name': 'Induction Hob', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': None, + }) +# --- # name: test_devices[da_ks_microwave_0101x] DeviceRegistryEntrySnapshot({ 'area_id': 'theater', @@ -365,6 +497,72 @@ 'via_device_id': None, }) # --- +# name: test_devices[da_ks_oven_01061] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://account.smartthings.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': 'Realtek', + 'id': , + 'identifiers': set({ + tuple( + 'smartthings', + '9447959a-0dfa-6b27-d40d-650da525c53f', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Samsung Electronics', + 'model': 'TP1X_DA-KS-OVEN-01061', + 'model_id': None, + 'name': 'Oven', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': 'AKS-WW-TP1X-21-OVEN_40211229', + 'via_device_id': None, + }) +# --- +# name: test_devices[da_ks_range_0101x] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://account.smartthings.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': 'Realtek', + 'id': , + 'identifiers': set({ + tuple( + 'smartthings', + '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Samsung Electronics', + 'model': 'TP1X_DA-KS-RANGE-0101X', + 'model_id': None, + 'name': 'Vulcan', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': 'AKS-WW-TP1-20-OVEN-3-CR_40240205', + 'via_device_id': None, + }) +# --- # name: test_devices[da_ref_normal_000001] DeviceRegistryEntrySnapshot({ 'area_id': 'theater', @@ -431,6 +629,39 @@ 'via_device_id': None, }) # --- +# name: test_devices[da_sac_ehs_000001_sub] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://account.smartthings.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'id': , + 'identifiers': set({ + tuple( + 'smartthings', + '1f98ebd0-ac48-d802-7f62-000001200100', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 
'manufacturer': 'Samsung Electronics', + 'model': 'SAC_EHS_MONO', + 'model_id': None, + 'name': 'Eco Heating System', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '20240611.1', + 'via_device_id': None, + }) +# --- # name: test_devices[da_wm_dw_000001] DeviceRegistryEntrySnapshot({ 'area_id': 'theater', @@ -464,6 +695,39 @@ 'via_device_id': None, }) # --- +# name: test_devices[da_wm_sc_000001] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://account.smartthings.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': 'MediaTek', + 'id': , + 'identifiers': set({ + tuple( + 'smartthings', + 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Samsung Electronics', + 'model': 'DA_DF_TP2_20_COMMON', + 'model_id': None, + 'name': 'AirDresser', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': 'DA_DF_TP2_20_COMMON_30230807', + 'via_device_id': None, + }) +# --- # name: test_devices[da_wm_wd_000001] DeviceRegistryEntrySnapshot({ 'area_id': 'theater', @@ -497,6 +761,39 @@ 'via_device_id': None, }) # --- +# name: test_devices[da_wm_wd_000001_1] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://account.smartthings.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': 'ARTIK051', + 'id': , + 'identifiers': set({ + tuple( + 'smartthings', + '3a6c4e05-811d-5041-e956-3d04c424cbcd', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Samsung Electronics', + 'model': 'DA_WM_A51_20_COMMON', + 'model_id': None, + 'name': 'Seca-Roupa', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': 'DA_WM_A51_20_COMMON_30230708', + 'via_device_id': None, + }) +# --- # name: test_devices[da_wm_wm_000001] DeviceRegistryEntrySnapshot({ 'area_id': 'theater', @@ -629,6 +926,39 @@ 'via_device_id': None, }) # --- +# name: test_devices[ecobee_thermostat_offline] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://account.smartthings.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '250308073247', + 'id': , + 'identifiers': set({ + tuple( + 'smartthings', + '1888b38f-6246-4f1e-911b-bfcfb66999db', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'ecobee', + 'model': 'nikeSmart-thermostat', + 'model_id': None, + 'name': 'Downstairs', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '250308073247', + 'via_device_id': None, + }) +# --- # name: test_devices[fake_fan] DeviceRegistryEntrySnapshot({ 'area_id': 'theater', @@ -761,6 +1091,39 @@ 'via_device_id': None, }) # --- +# name: test_devices[heatit_zpushwall] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://account.smartthings.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'smartthings', + '5e5b97f3-3094-44e6-abc0-f61283412d6a', + ), + }), 
+ 'is_new': False, + 'labels': set({ + }), + 'manufacturer': None, + 'model': None, + 'model_id': None, + 'name': 'Livingroom smart switch', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': None, + }) +# --- # name: test_devices[heatit_ztrm3_thermostat] DeviceRegistryEntrySnapshot({ 'area_id': None, @@ -860,6 +1223,72 @@ 'via_device_id': None, }) # --- +# name: test_devices[hw_q80r_soundbar] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://account.smartthings.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '0-0', + 'id': , + 'identifiers': set({ + tuple( + 'smartthings', + 'afcf3b91-0000-1111-2222-ddff2a0a6577', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Samsung Electronics', + 'model': 'Q80R', + 'model_id': None, + 'name': 'Soundbar', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': 'HW-Q80RWWB-1012.6', + 'via_device_id': None, + }) +# --- +# name: test_devices[ikea_kadrilj] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://account.smartthings.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'smartthings', + '71afed1c-006d-4e48-b16e-e7f88f9fd638', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': None, + 'model': None, + 'model_id': None, + 'name': 'Kitchen IKEA KADRILJ Window blind', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': None, + }) +# --- # name: test_devices[im_speaker_ai_0001] DeviceRegistryEntrySnapshot({ 'area_id': None, @@ -1058,6 +1487,39 @@ 'via_device_id': None, }) # --- +# name: test_devices[tplink_p110] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://account.smartthings.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '1.0', + 'id': , + 'identifiers': set({ + tuple( + 'smartthings', + '6602696a-1e48-49e4-919f-69406f5b5da1', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'TP-Link', + 'model': 'P110', + 'model_id': None, + 'name': 'Spülmaschine', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '1.3.1 Build 240621 Rel.162048', + 'via_device_id': None, + }) +# --- # name: test_devices[vd_network_audio_002s] DeviceRegistryEntrySnapshot({ 'area_id': 'theater', @@ -1091,6 +1553,39 @@ 'via_device_id': None, }) # --- +# name: test_devices[vd_sensor_light_2023] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://account.smartthings.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'id': , + 'identifiers': set({ + tuple( + 'smartthings', + '5cc1c096-98b9-460c-8f1c-1045509ec605', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Samsung Electronics', + 'model': 'QE55LS03DAUXXN', + 'model_id': None, + 'name': 'Light Sensor - 55" The Frame', + 'name_by_user': None, + 
'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': 'latest', + 'via_device_id': None, + }) +# --- # name: test_devices[vd_stv_2017_k] DeviceRegistryEntrySnapshot({ 'area_id': 'theater', @@ -1261,7 +1756,7 @@ 'area_id': 'theater', 'config_entries': , 'config_entries_subentries': , - 'configuration_url': None, + 'configuration_url': 'https://account.smartthings.com', 'connections': set({ tuple( 'mac', diff --git a/tests/components/smartthings/snapshots/test_light.ambr b/tests/components/smartthings/snapshots/test_light.ambr index 8766811c443..6826a555f6a 100644 --- a/tests/components/smartthings/snapshots/test_light.ambr +++ b/tests/components/smartthings/snapshots/test_light.ambr @@ -1,4 +1,74 @@ # serializer version: 1 +# name: test_all_entities[abl_light_b_001][light.kitchen_light_5-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max_color_temp_kelvin': 9000, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 111, + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.kitchen_light_5', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '7c16163e-c94e-482f-95f6-139ae0cd9d5e_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[abl_light_b_001][light.kitchen_light_5-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': None, + 'color_mode': None, + 'color_temp': None, + 'color_temp_kelvin': None, + 'friendly_name': 'Kitchen Light 5', + 'hs_color': None, + 'max_color_temp_kelvin': 9000, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 111, + 'rgb_color': None, + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + 'xy_color': None, + }), + 'context': , + 'entity_id': 'light.kitchen_light_5', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- # name: test_all_entities[centralite][light.dimmer_debian-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -33,7 +103,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': 'd0268a69-abfb-4c92-a646-61cec2e510ad', + 'unique_id': 'd0268a69-abfb-4c92-a646-61cec2e510ad_main', 'unit_of_measurement': None, }) # --- @@ -90,7 +160,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': 'aaedaf28-2ae0-4c1d-b57e-87f6a420c298', + 'unique_id': 'aaedaf28-2ae0-4c1d-b57e-87f6a420c298_main', 'unit_of_measurement': None, }) # --- @@ -151,7 +221,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '440063de-a200-40b5-8a6b-f3399eaa0370', + 'unique_id': '440063de-a200-40b5-8a6b-f3399eaa0370_main', 'unit_of_measurement': None, }) # --- @@ -232,7 +302,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': 'cb958955-b015-498c-9e62-fc0c51abd054', + 'unique_id': 'cb958955-b015-498c-9e62-fc0c51abd054_main', 'unit_of_measurement': None, }) # --- diff --git 
a/tests/components/smartthings/snapshots/test_lock.ambr b/tests/components/smartthings/snapshots/test_lock.ambr index 2cf9688c3dd..325ce0cc677 100644 --- a/tests/components/smartthings/snapshots/test_lock.ambr +++ b/tests/components/smartthings/snapshots/test_lock.ambr @@ -29,7 +29,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'a9f587c5-5d8b-4273-8907-e7f609af5158', + 'unique_id': 'a9f587c5-5d8b-4273-8907-e7f609af5158_main', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/smartthings/snapshots/test_media_player.ambr b/tests/components/smartthings/snapshots/test_media_player.ambr new file mode 100644 index 00000000000..83f9d19b9fa --- /dev/null +++ b/tests/components/smartthings/snapshots/test_media_player.ambr @@ -0,0 +1,298 @@ +# serializer version: 1 +# name: test_all_entities[hw_q80r_soundbar][media_player.soundbar-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'source_list': list([ + 'wifi', + 'bluetooth', + 'HDMI1', + 'HDMI2', + 'digital', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'media_player', + 'entity_category': None, + 'entity_id': 'media_player.soundbar', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'afcf3b91-0000-1111-2222-ddff2a0a6577_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[hw_q80r_soundbar][media_player.soundbar-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'speaker', + 'friendly_name': 'Soundbar', + 'is_volume_muted': False, + 'media_artist': 'Rick Astley', + 'media_title': 'Never Gonna Give You Up', + 'source': 'wifi', + 'source_list': list([ + 'wifi', + 'bluetooth', + 'HDMI1', + 'HDMI2', + 'digital', + ]), + 'supported_features': , + 'volume_level': 0.01, + }), + 'context': , + 'entity_id': 'media_player.soundbar', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_all_entities[im_speaker_ai_0001][media_player.galaxy_home_mini-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'media_player', + 'entity_category': None, + 'entity_id': 'media_player.galaxy_home_mini', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'c9276e43-fe3c-88c3-1dcc-2eb79e292b8c_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[im_speaker_ai_0001][media_player.galaxy_home_mini-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'speaker', + 'friendly_name': 'Galaxy Home Mini', + 'is_volume_muted': False, + 'repeat': , + 'shuffle': False, + 'supported_features': , + 'volume_level': 0.52, + }), + 'context': , + 'entity_id': 
'media_player.galaxy_home_mini', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- +# name: test_all_entities[sonos_player][media_player.elliots_rum-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'media_player', + 'entity_category': None, + 'entity_id': 'media_player.elliots_rum', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'c85fced9-c474-4a47-93c2-037cc7829536_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sonos_player][media_player.elliots_rum-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'speaker', + 'friendly_name': 'Elliots Rum', + 'is_volume_muted': False, + 'media_artist': 'David Guetta', + 'media_title': 'Forever Young', + 'supported_features': , + 'volume_level': 0.15, + }), + 'context': , + 'entity_id': 'media_player.elliots_rum', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_all_entities[vd_network_audio_002s][media_player.soundbar_living-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'media_player', + 'entity_category': None, + 'entity_id': 'media_player.soundbar_living', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '0d94e5db-8501-2355-eb4f-214163702cac_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[vd_network_audio_002s][media_player.soundbar_living-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'speaker', + 'friendly_name': 'Soundbar Living', + 'is_volume_muted': False, + 'media_artist': '', + 'media_title': '', + 'source': 'HDMI1', + 'supported_features': , + 'volume_level': 0.17, + }), + 'context': , + 'entity_id': 'media_player.soundbar_living', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[vd_stv_2017_k][media_player.tv_samsung_8_series_49-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'source_list': list([ + 'digitalTv', + 'HDMI1', + 'HDMI4', + 'HDMI4', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'media_player', + 'entity_category': None, + 'entity_id': 'media_player.tv_samsung_8_series_49', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, 
+ 'supported_features': , + 'translation_key': None, + 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[vd_stv_2017_k][media_player.tv_samsung_8_series_49-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'tv', + 'friendly_name': '[TV] Samsung 8 Series (49)', + 'is_volume_muted': True, + 'source': 'HDMI1', + 'source_list': list([ + 'digitalTv', + 'HDMI1', + 'HDMI4', + 'HDMI4', + ]), + 'supported_features': , + 'volume_level': 0.13, + }), + 'context': , + 'entity_id': 'media_player.tv_samsung_8_series_49', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/smartthings/snapshots/test_number.ambr b/tests/components/smartthings/snapshots/test_number.ambr new file mode 100644 index 00000000000..a5954a98cf3 --- /dev/null +++ b/tests/components/smartthings/snapshots/test_number.ambr @@ -0,0 +1,115 @@ +# serializer version: 1 +# name: test_all_entities[da_wm_wm_000001][number.washer_rinse_cycles-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 5, + 'min': 0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.washer_rinse_cycles', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Rinse cycles', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'washer_rinse_cycles', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_custom.washerRinseCycles_washerRinseCycles_washerRinseCycles', + 'unit_of_measurement': 'cycles', + }) +# --- +# name: test_all_entities[da_wm_wm_000001][number.washer_rinse_cycles-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Washer Rinse cycles', + 'max': 5, + 'min': 0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': 'cycles', + }), + 'context': , + 'entity_id': 'number.washer_rinse_cycles', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_all_entities[da_wm_wm_000001_1][number.washing_machine_rinse_cycles-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 5, + 'min': 0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.washing_machine_rinse_cycles', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Rinse cycles', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'washer_rinse_cycles', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_custom.washerRinseCycles_washerRinseCycles_washerRinseCycles', + 'unit_of_measurement': 'cycles', + }) +# --- +# name: test_all_entities[da_wm_wm_000001_1][number.washing_machine_rinse_cycles-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Washing Machine 
Rinse cycles', + 'max': 5, + 'min': 0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': 'cycles', + }), + 'context': , + 'entity_id': 'number.washing_machine_rinse_cycles', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- diff --git a/tests/components/smartthings/snapshots/test_select.ambr b/tests/components/smartthings/snapshots/test_select.ambr new file mode 100644 index 00000000000..06185e09547 --- /dev/null +++ b/tests/components/smartthings/snapshots/test_select.ambr @@ -0,0 +1,349 @@ +# serializer version: 1 +# name: test_all_entities[da_wm_dw_000001][select.dishwasher-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'stop', + 'run', + 'pause', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.dishwasher', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'operating_state', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_dishwasherOperatingState_machineState_machineState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_dw_000001][select.dishwasher-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dishwasher', + 'options': list([ + 'stop', + 'run', + 'pause', + ]), + }), + 'context': , + 'entity_id': 'select.dishwasher', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'stop', + }) +# --- +# name: test_all_entities[da_wm_sc_000001][select.airdresser-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'stop', + 'run', + 'pause', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.airdresser', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'operating_state', + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_dryerOperatingState_machineState_machineState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_sc_000001][select.airdresser-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'AirDresser', + 'options': list([ + 'stop', + 'run', + 'pause', + ]), + }), + 'context': , + 'entity_id': 'select.airdresser', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'stop', + }) +# --- +# name: test_all_entities[da_wm_wd_000001][select.dryer-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'stop', + 'run', + 'pause', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 
'select.dryer', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'operating_state', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_dryerOperatingState_machineState_machineState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wd_000001][select.dryer-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dryer', + 'options': list([ + 'stop', + 'run', + 'pause', + ]), + }), + 'context': , + 'entity_id': 'select.dryer', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'stop', + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][select.seca_roupa-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'stop', + 'run', + 'pause', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.seca_roupa', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'operating_state', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_dryerOperatingState_machineState_machineState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][select.seca_roupa-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Seca-Roupa', + 'options': list([ + 'stop', + 'run', + 'pause', + ]), + }), + 'context': , + 'entity_id': 'select.seca_roupa', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'stop', + }) +# --- +# name: test_all_entities[da_wm_wm_000001][select.washer-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'stop', + 'run', + 'pause', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.washer', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'operating_state', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_washerOperatingState_machineState_machineState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wm_000001][select.washer-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Washer', + 'options': list([ + 'stop', + 'run', + 'pause', + ]), + }), + 'context': , + 'entity_id': 'select.washer', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'stop', + }) +# --- +# name: test_all_entities[da_wm_wm_000001_1][select.washing_machine-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': 
None, + 'capabilities': dict({ + 'options': list([ + 'stop', + 'run', + 'pause', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.washing_machine', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'operating_state', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_washerOperatingState_machineState_machineState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wm_000001_1][select.washing_machine-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Washing Machine', + 'options': list([ + 'stop', + 'run', + 'pause', + ]), + }), + 'context': , + 'entity_id': 'select.washing_machine', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'run', + }) +# --- diff --git a/tests/components/smartthings/snapshots/test_sensor.ambr b/tests/components/smartthings/snapshots/test_sensor.ambr index b939547ca32..416a3d15947 100644 --- a/tests/components/smartthings/snapshots/test_sensor.ambr +++ b/tests/components/smartthings/snapshots/test_sensor.ambr @@ -31,7 +31,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'f0af21a2-d5a1-437c-b10a-b34a87394b71.energy', + 'unique_id': 'f0af21a2-d5a1-437c-b10a-b34a87394b71_main_energyMeter_energy_energy', 'unit_of_measurement': 'kWh', }) # --- @@ -83,7 +83,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'f0af21a2-d5a1-437c-b10a-b34a87394b71.power', + 'unique_id': 'f0af21a2-d5a1-437c-b10a-b34a87394b71_main_powerMeter_power_power', 'unit_of_measurement': 'W', }) # --- @@ -135,7 +135,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'f0af21a2-d5a1-437c-b10a-b34a87394b71.voltage', + 'unique_id': 'f0af21a2-d5a1-437c-b10a-b34a87394b71_main_voltageMeasurement_voltage_voltage', 'unit_of_measurement': None, }) # --- @@ -154,6 +154,58 @@ 'state': 'unknown', }) # --- +# name: test_all_entities[aux_ac][sensor.aux_a_c_on_off_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.aux_a_c_on_off_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'bf53a150-f8a4-45d1-aac4-86252475d551_main_temperatureMeasurement_temperature_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[aux_ac][sensor.aux_a_c_on_off_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'AUX A/C on-off Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 
'sensor.aux_a_c_on_off_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20.0', + }) +# --- # name: test_all_entities[base_electric_meter][sensor.aeon_energy_monitor_energy-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -186,7 +238,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '68e786a6-7f61-4c3a-9e13-70b803cf782b.energy', + 'unique_id': '68e786a6-7f61-4c3a-9e13-70b803cf782b_main_energyMeter_energy_energy', 'unit_of_measurement': 'kWh', }) # --- @@ -238,7 +290,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '68e786a6-7f61-4c3a-9e13-70b803cf782b.power', + 'unique_id': '68e786a6-7f61-4c3a-9e13-70b803cf782b_main_powerMeter_power_power', 'unit_of_measurement': 'W', }) # --- @@ -288,7 +340,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '286ba274-4093-4bcb-849c-a1a3efe7b1e5.battery', + 'unique_id': '286ba274-4093-4bcb-849c-a1a3efe7b1e5_main_battery_battery_battery', 'unit_of_measurement': '%', }) # --- @@ -339,7 +391,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '286ba274-4093-4bcb-849c-a1a3efe7b1e5.temperature', + 'unique_id': '286ba274-4093-4bcb-849c-a1a3efe7b1e5_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -396,7 +448,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'alarm', - 'unique_id': '10e06a70-ee7d-4832-85e9-a0a06a7a05bd.alarm', + 'unique_id': '10e06a70-ee7d-4832-85e9-a0a06a7a05bd_main_alarm_alarm_alarm', 'unit_of_measurement': None, }) # --- @@ -450,7 +502,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '10e06a70-ee7d-4832-85e9-a0a06a7a05bd.battery', + 'unique_id': '10e06a70-ee7d-4832-85e9-a0a06a7a05bd_main_battery_battery_battery', 'unit_of_measurement': '%', }) # --- @@ -501,7 +553,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'd0268a69-abfb-4c92-a646-61cec2e510ad.power', + 'unique_id': 'd0268a69-abfb-4c92-a646-61cec2e510ad_main_powerMeter_power_power', 'unit_of_measurement': 'W', }) # --- @@ -551,7 +603,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '2d9a892b-1c93-45a5-84cb-0e81889498c6.battery', + 'unique_id': '2d9a892b-1c93-45a5-84cb-0e81889498c6_main_battery_battery_battery', 'unit_of_measurement': '%', }) # --- @@ -602,7 +654,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '2d9a892b-1c93-45a5-84cb-0e81889498c6.temperature', + 'unique_id': '2d9a892b-1c93-45a5-84cb-0e81889498c6_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -622,6 +674,416 @@ 'state': '15.0', }) # --- +# name: test_all_entities[da_ac_airsensor_01001][sensor.eeomoniteo_peulreoseu_air_quality-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eeomoniteo_peulreoseu_air_quality', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 
'Air quality', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'air_quality', + 'unique_id': 'a3a970ea-e09c-9c04-161b-94c934e21666_main_airQualitySensor_airQuality_airQuality', + 'unit_of_measurement': 'CAQI', + }) +# --- +# name: test_all_entities[da_ac_airsensor_01001][sensor.eeomoniteo_peulreoseu_air_quality-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': '에어모니터 플러스 Air quality', + 'state_class': , + 'unit_of_measurement': 'CAQI', + }), + 'context': , + 'entity_id': 'sensor.eeomoniteo_peulreoseu_air_quality', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_all_entities[da_ac_airsensor_01001][sensor.eeomoniteo_peulreoseu_carbon_dioxide-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eeomoniteo_peulreoseu_carbon_dioxide', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Carbon dioxide', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'a3a970ea-e09c-9c04-161b-94c934e21666_main_carbonDioxideMeasurement_carbonDioxide_carbonDioxide', + 'unit_of_measurement': 'ppm', + }) +# --- +# name: test_all_entities[da_ac_airsensor_01001][sensor.eeomoniteo_peulreoseu_carbon_dioxide-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'carbon_dioxide', + 'friendly_name': '에어모니터 플러스 Carbon dioxide', + 'state_class': , + 'unit_of_measurement': 'ppm', + }), + 'context': , + 'entity_id': 'sensor.eeomoniteo_peulreoseu_carbon_dioxide', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1045', + }) +# --- +# name: test_all_entities[da_ac_airsensor_01001][sensor.eeomoniteo_peulreoseu_humidity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eeomoniteo_peulreoseu_humidity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Humidity', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'a3a970ea-e09c-9c04-161b-94c934e21666_main_relativeHumidityMeasurement_humidity_humidity', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[da_ac_airsensor_01001][sensor.eeomoniteo_peulreoseu_humidity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'humidity', + 'friendly_name': '에어모니터 플러스 Humidity', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.eeomoniteo_peulreoseu_humidity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '54', + }) +# --- +# name: 
test_all_entities[da_ac_airsensor_01001][sensor.eeomoniteo_peulreoseu_odor_sensor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eeomoniteo_peulreoseu_odor_sensor', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Odor sensor', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'odor_sensor', + 'unique_id': 'a3a970ea-e09c-9c04-161b-94c934e21666_main_odorSensor_odorLevel_odorLevel', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ac_airsensor_01001][sensor.eeomoniteo_peulreoseu_odor_sensor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': '에어모니터 플러스 Odor sensor', + }), + 'context': , + 'entity_id': 'sensor.eeomoniteo_peulreoseu_odor_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_all_entities[da_ac_airsensor_01001][sensor.eeomoniteo_peulreoseu_pm1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eeomoniteo_peulreoseu_pm1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PM1', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'a3a970ea-e09c-9c04-161b-94c934e21666_main_veryFineDustSensor_veryFineDustLevel_veryFineDustLevel', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_all_entities[da_ac_airsensor_01001][sensor.eeomoniteo_peulreoseu_pm1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm1', + 'friendly_name': '에어모니터 플러스 PM1', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.eeomoniteo_peulreoseu_pm1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6', + }) +# --- +# name: test_all_entities[da_ac_airsensor_01001][sensor.eeomoniteo_peulreoseu_pm10-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eeomoniteo_peulreoseu_pm10', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PM10', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'a3a970ea-e09c-9c04-161b-94c934e21666_main_dustSensor_dustLevel_dustLevel', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: 
test_all_entities[da_ac_airsensor_01001][sensor.eeomoniteo_peulreoseu_pm10-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm10', + 'friendly_name': '에어모니터 플러스 PM10', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.eeomoniteo_peulreoseu_pm10', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '31', + }) +# --- +# name: test_all_entities[da_ac_airsensor_01001][sensor.eeomoniteo_peulreoseu_pm2_5-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eeomoniteo_peulreoseu_pm2_5', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PM2.5', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'a3a970ea-e09c-9c04-161b-94c934e21666_main_dustSensor_fineDustLevel_fineDustLevel', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_all_entities[da_ac_airsensor_01001][sensor.eeomoniteo_peulreoseu_pm2_5-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm25', + 'friendly_name': '에어모니터 플러스 PM2.5', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.eeomoniteo_peulreoseu_pm2_5', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7', + }) +# --- +# name: test_all_entities[da_ac_airsensor_01001][sensor.eeomoniteo_peulreoseu_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eeomoniteo_peulreoseu_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'a3a970ea-e09c-9c04-161b-94c934e21666_main_temperatureMeasurement_temperature_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_ac_airsensor_01001][sensor.eeomoniteo_peulreoseu_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': '에어모니터 플러스 Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.eeomoniteo_peulreoseu_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '23.0', + }) +# --- # name: test_all_entities[da_ac_rac_000001][sensor.ac_office_granit_energy-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -657,7 +1119,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d.energy_meter', + 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d_main_powerConsumptionReport_powerConsumption_energy_meter', 'unit_of_measurement': , }) 
# --- @@ -712,7 +1174,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_difference', - 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d.deltaEnergy_meter', + 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', 'unit_of_measurement': , }) # --- @@ -767,7 +1229,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_saved', - 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d.energySaved_meter', + 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d_main_powerConsumptionReport_powerConsumption_energySaved_meter', 'unit_of_measurement': , }) # --- @@ -819,7 +1281,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d.humidity', + 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d_main_relativeHumidityMeasurement_humidity_humidity', 'unit_of_measurement': '%', }) # --- @@ -874,7 +1336,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d.power_meter', + 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d_main_powerConsumptionReport_powerConsumption_power_meter', 'unit_of_measurement': , }) # --- @@ -931,7 +1393,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_energy', - 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d.powerEnergy_meter', + 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d_main_powerConsumptionReport_powerConsumption_powerEnergy_meter', 'unit_of_measurement': , }) # --- @@ -983,7 +1445,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d.temperature', + 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -1033,7 +1495,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'audio_volume', - 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d.volume', + 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d_main_audioVolume_volume_volume', 'unit_of_measurement': '%', }) # --- @@ -1086,7 +1548,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e.energy_meter', + 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e_main_powerConsumptionReport_powerConsumption_energy_meter', 'unit_of_measurement': , }) # --- @@ -1141,7 +1603,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_difference', - 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e.deltaEnergy_meter', + 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', 'unit_of_measurement': , }) # --- @@ -1196,7 +1658,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_saved', - 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e.energySaved_meter', + 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e_main_powerConsumptionReport_powerConsumption_energySaved_meter', 'unit_of_measurement': , }) # --- @@ -1248,7 +1710,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e.humidity', + 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e_main_relativeHumidityMeasurement_humidity_humidity', 'unit_of_measurement': '%', }) # --- @@ -1303,7 +1765,7 @@ 
'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e.power_meter', + 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e_main_powerConsumptionReport_powerConsumption_power_meter', 'unit_of_measurement': , }) # --- @@ -1360,7 +1822,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_energy', - 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e.powerEnergy_meter', + 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e_main_powerConsumptionReport_powerConsumption_powerEnergy_meter', 'unit_of_measurement': , }) # --- @@ -1412,7 +1874,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e.temperature', + 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -1462,7 +1924,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'audio_volume', - 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e.volume', + 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e_main_audioVolume_volume_volume', 'unit_of_measurement': '%', }) # --- @@ -1512,7 +1974,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'air_quality', - 'unique_id': 'F8042E25-0E53-0000-0000-000000000000.airQuality', + 'unique_id': 'F8042E25-0E53-0000-0000-000000000000_main_airQualitySensor_airQuality_airQuality', 'unit_of_measurement': 'CAQI', }) # --- @@ -1563,7 +2025,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'F8042E25-0E53-0000-0000-000000000000.dustLevel', + 'unique_id': 'F8042E25-0E53-0000-0000-000000000000_main_dustSensor_dustLevel_dustLevel', 'unit_of_measurement': 'µg/m³', }) # --- @@ -1615,7 +2077,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'F8042E25-0E53-0000-0000-000000000000.fineDustLevel', + 'unique_id': 'F8042E25-0E53-0000-0000-000000000000_main_dustSensor_fineDustLevel_fineDustLevel', 'unit_of_measurement': 'µg/m³', }) # --- @@ -1667,7 +2129,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'F8042E25-0E53-0000-0000-000000000000.temperature', + 'unique_id': 'F8042E25-0E53-0000-0000-000000000000_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -1717,7 +2179,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'completion_time', - 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a.completionTime', + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenOperatingState_completionTime_completionTime', 'unit_of_measurement': None, }) # --- @@ -1785,7 +2247,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'oven_job_state', - 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a.ovenJobState', + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenOperatingState_ovenJobState_ovenJobState', 'unit_of_measurement': None, }) # --- @@ -1858,7 +2320,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'oven_machine_state', - 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a.machineState', + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenOperatingState_machineState_machineState', 'unit_of_measurement': None, }) # --- @@ -1939,7 +2401,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'oven_mode', - 
'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a.ovenMode', + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenMode_ovenMode_ovenMode', 'unit_of_measurement': None, }) # --- @@ -2007,28 +2469,30 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Set point', 'platform': 'smartthings', 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'oven_setpoint', - 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a.ovenSetpoint', - 'unit_of_measurement': None, + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenSetpoint_ovenSetpoint_ovenSetpoint', + 'unit_of_measurement': , }) # --- # name: test_all_entities[da_ks_microwave_0101x][sensor.microwave_set_point-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', 'friendly_name': 'Microwave Set point', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.microwave_set_point', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '0', + 'state': 'unknown', }) # --- # name: test_all_entities[da_ks_microwave_0101x][sensor.microwave_temperature-entry] @@ -2063,7 +2527,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a.temperature', + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -2083,6 +2547,802 @@ 'state': '-17', }) # --- +# name: test_all_entities[da_ks_oven_01061][sensor.oven_completion_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.oven_completion_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Completion time', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'completion_time', + 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f_main_ovenOperatingState_completionTime_completionTime', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_oven_01061][sensor.oven_completion_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Oven Completion time', + }), + 'context': , + 'entity_id': 'sensor.oven_completion_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2025-03-15T12:06:09+00:00', + }) +# --- +# name: test_all_entities[da_ks_oven_01061][sensor.oven_job_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'cleaning', + 'cooking', + 'cooling', + 'draining', + 'preheat', + 'ready', + 'rinsing', + 'finished', + 'scheduled_start', + 'warming', + 'defrosting', + 'sensing', + 'searing', + 'fast_preheat', + 'scheduled_end', + 'stone_heating', + 'time_hold_preheat', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.oven_job_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': 
None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Job state', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'oven_job_state', + 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f_main_ovenOperatingState_ovenJobState_ovenJobState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_oven_01061][sensor.oven_job_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Oven Job state', + 'options': list([ + 'cleaning', + 'cooking', + 'cooling', + 'draining', + 'preheat', + 'ready', + 'rinsing', + 'finished', + 'scheduled_start', + 'warming', + 'defrosting', + 'sensing', + 'searing', + 'fast_preheat', + 'scheduled_end', + 'stone_heating', + 'time_hold_preheat', + ]), + }), + 'context': , + 'entity_id': 'sensor.oven_job_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'preheat', + }) +# --- +# name: test_all_entities[da_ks_oven_01061][sensor.oven_machine_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'ready', + 'running', + 'paused', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.oven_machine_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Machine state', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'oven_machine_state', + 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f_main_ovenOperatingState_machineState_machineState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_oven_01061][sensor.oven_machine_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Oven Machine state', + 'options': list([ + 'ready', + 'running', + 'paused', + ]), + }), + 'context': , + 'entity_id': 'sensor.oven_machine_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'running', + }) +# --- +# name: test_all_entities[da_ks_oven_01061][sensor.oven_oven_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'conventional', + 'bake', + 'bottom_heat', + 'convection_bake', + 'convection_roast', + 'broil', + 'convection_broil', + 'steam_cook', + 'steam_bake', + 'steam_roast', + 'steam_bottom_heat_plus_convection', + 'microwave', + 'microwave_plus_grill', + 'microwave_plus_convection', + 'microwave_plus_hot_blast', + 'microwave_plus_hot_blast_2', + 'slim_middle', + 'slim_strong', + 'slow_cook', + 'proof', + 'dehydrate', + 'others', + 'strong_steam', + 'descale', + 'rinse', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.oven_oven_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Oven mode', + 
'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'oven_mode', + 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f_main_ovenMode_ovenMode_ovenMode', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_oven_01061][sensor.oven_oven_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Oven Oven mode', + 'options': list([ + 'conventional', + 'bake', + 'bottom_heat', + 'convection_bake', + 'convection_roast', + 'broil', + 'convection_broil', + 'steam_cook', + 'steam_bake', + 'steam_roast', + 'steam_bottom_heat_plus_convection', + 'microwave', + 'microwave_plus_grill', + 'microwave_plus_convection', + 'microwave_plus_hot_blast', + 'microwave_plus_hot_blast_2', + 'slim_middle', + 'slim_strong', + 'slow_cook', + 'proof', + 'dehydrate', + 'others', + 'strong_steam', + 'descale', + 'rinse', + ]), + }), + 'context': , + 'entity_id': 'sensor.oven_oven_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'bake', + }) +# --- +# name: test_all_entities[da_ks_oven_01061][sensor.oven_set_point-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.oven_set_point', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Set point', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'oven_setpoint', + 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f_main_ovenSetpoint_ovenSetpoint_ovenSetpoint', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_ks_oven_01061][sensor.oven_set_point-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Oven Set point', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.oven_set_point', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '220', + }) +# --- +# name: test_all_entities[da_ks_oven_01061][sensor.oven_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.oven_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f_main_temperatureMeasurement_temperature_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_ks_oven_01061][sensor.oven_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Oven Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.oven_temperature', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': '30', + }) +# --- +# name: test_all_entities[da_ks_range_0101x][sensor.vulcan_completion_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.vulcan_completion_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Completion time', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'completion_time', + 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18_main_ovenOperatingState_completionTime_completionTime', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_range_0101x][sensor.vulcan_completion_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Vulcan Completion time', + }), + 'context': , + 'entity_id': 'sensor.vulcan_completion_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2025-03-14T03:23:28+00:00', + }) +# --- +# name: test_all_entities[da_ks_range_0101x][sensor.vulcan_job_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'cleaning', + 'cooking', + 'cooling', + 'draining', + 'preheat', + 'ready', + 'rinsing', + 'finished', + 'scheduled_start', + 'warming', + 'defrosting', + 'sensing', + 'searing', + 'fast_preheat', + 'scheduled_end', + 'stone_heating', + 'time_hold_preheat', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.vulcan_job_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Job state', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'oven_job_state', + 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18_main_ovenOperatingState_ovenJobState_ovenJobState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_range_0101x][sensor.vulcan_job_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Vulcan Job state', + 'options': list([ + 'cleaning', + 'cooking', + 'cooling', + 'draining', + 'preheat', + 'ready', + 'rinsing', + 'finished', + 'scheduled_start', + 'warming', + 'defrosting', + 'sensing', + 'searing', + 'fast_preheat', + 'scheduled_end', + 'stone_heating', + 'time_hold_preheat', + ]), + }), + 'context': , + 'entity_id': 'sensor.vulcan_job_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'cooking', + }) +# --- +# name: test_all_entities[da_ks_range_0101x][sensor.vulcan_machine_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'ready', + 'running', + 'paused', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', 
+ 'entity_category': None, + 'entity_id': 'sensor.vulcan_machine_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Machine state', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'oven_machine_state', + 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18_main_ovenOperatingState_machineState_machineState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_range_0101x][sensor.vulcan_machine_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Vulcan Machine state', + 'options': list([ + 'ready', + 'running', + 'paused', + ]), + }), + 'context': , + 'entity_id': 'sensor.vulcan_machine_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'running', + }) +# --- +# name: test_all_entities[da_ks_range_0101x][sensor.vulcan_oven_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'conventional', + 'bake', + 'bottom_heat', + 'convection_bake', + 'convection_roast', + 'broil', + 'convection_broil', + 'steam_cook', + 'steam_bake', + 'steam_roast', + 'steam_bottom_heat_plus_convection', + 'microwave', + 'microwave_plus_grill', + 'microwave_plus_convection', + 'microwave_plus_hot_blast', + 'microwave_plus_hot_blast_2', + 'slim_middle', + 'slim_strong', + 'slow_cook', + 'proof', + 'dehydrate', + 'others', + 'strong_steam', + 'descale', + 'rinse', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.vulcan_oven_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Oven mode', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'oven_mode', + 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18_main_ovenMode_ovenMode_ovenMode', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_range_0101x][sensor.vulcan_oven_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Vulcan Oven mode', + 'options': list([ + 'conventional', + 'bake', + 'bottom_heat', + 'convection_bake', + 'convection_roast', + 'broil', + 'convection_broil', + 'steam_cook', + 'steam_bake', + 'steam_roast', + 'steam_bottom_heat_plus_convection', + 'microwave', + 'microwave_plus_grill', + 'microwave_plus_convection', + 'microwave_plus_hot_blast', + 'microwave_plus_hot_blast_2', + 'slim_middle', + 'slim_strong', + 'slow_cook', + 'proof', + 'dehydrate', + 'others', + 'strong_steam', + 'descale', + 'rinse', + ]), + }), + 'context': , + 'entity_id': 'sensor.vulcan_oven_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'bake', + }) +# --- +# name: test_all_entities[da_ks_range_0101x][sensor.vulcan_set_point-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 
'entity_id': 'sensor.vulcan_set_point', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Set point', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'oven_setpoint', + 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18_main_ovenSetpoint_ovenSetpoint_ovenSetpoint', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_ks_range_0101x][sensor.vulcan_set_point-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Vulcan Set point', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.vulcan_set_point', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '218', + }) +# --- +# name: test_all_entities[da_ks_range_0101x][sensor.vulcan_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.vulcan_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18_main_temperatureMeasurement_temperature_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_ks_range_0101x][sensor.vulcan_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Vulcan Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.vulcan_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '218', + }) +# --- # name: test_all_entities[da_ref_normal_000001][sensor.refrigerator_energy-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2118,7 +3378,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09.energy_meter', + 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09_main_powerConsumptionReport_powerConsumption_energy_meter', 'unit_of_measurement': , }) # --- @@ -2173,7 +3433,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_difference', - 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09.deltaEnergy_meter', + 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', 'unit_of_measurement': , }) # --- @@ -2228,7 +3488,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_saved', - 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09.energySaved_meter', + 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09_main_powerConsumptionReport_powerConsumption_energySaved_meter', 'unit_of_measurement': , }) # --- @@ -2283,7 +3543,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09.power_meter', + 'unique_id': 
'7db87911-7dce-1cf2-7119-b953432a2f09_main_powerConsumptionReport_powerConsumption_power_meter', 'unit_of_measurement': , }) # --- @@ -2340,7 +3600,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_energy', - 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09.powerEnergy_meter', + 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09_main_powerConsumptionReport_powerConsumption_powerEnergy_meter', 'unit_of_measurement': , }) # --- @@ -2390,7 +3650,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '3442dfc6-17c0-a65f-dae0-4c6e01786f44.battery', + 'unique_id': '3442dfc6-17c0-a65f-dae0-4c6e01786f44_main_battery_battery_battery', 'unit_of_measurement': '%', }) # --- @@ -2448,7 +3708,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'robot_cleaner_cleaning_mode', - 'unique_id': '3442dfc6-17c0-a65f-dae0-4c6e01786f44.robotCleanerCleaningMode', + 'unique_id': '3442dfc6-17c0-a65f-dae0-4c6e01786f44_main_robotCleanerCleaningMode_robotCleanerCleaningMode_robotCleanerCleaningMode', 'unit_of_measurement': None, }) # --- @@ -2517,7 +3777,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'robot_cleaner_movement', - 'unique_id': '3442dfc6-17c0-a65f-dae0-4c6e01786f44.robotCleanerMovement', + 'unique_id': '3442dfc6-17c0-a65f-dae0-4c6e01786f44_main_robotCleanerMovement_robotCleanerMovement_robotCleanerMovement', 'unit_of_measurement': None, }) # --- @@ -2584,7 +3844,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'robot_cleaner_turbo_mode', - 'unique_id': '3442dfc6-17c0-a65f-dae0-4c6e01786f44.robotCleanerTurboMode', + 'unique_id': '3442dfc6-17c0-a65f-dae0-4c6e01786f44_main_robotCleanerTurboMode_robotCleanerTurboMode_robotCleanerTurboMode', 'unit_of_measurement': None, }) # --- @@ -2608,6 +3868,384 @@ 'state': 'off', }) # --- +# name: test_all_entities[da_sac_ehs_000001_sub][sensor.eco_heating_system_cooling_set_point-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eco_heating_system_cooling_set_point', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cooling set point', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'thermostat_cooling_setpoint', + 'unique_id': '1f98ebd0-ac48-d802-7f62-000001200100_main_thermostatCoolingSetpoint_coolingSetpoint_coolingSetpoint', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_sac_ehs_000001_sub][sensor.eco_heating_system_cooling_set_point-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Eco Heating System Cooling set point', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.eco_heating_system_cooling_set_point', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '48', + }) +# --- +# name: test_all_entities[da_sac_ehs_000001_sub][sensor.eco_heating_system_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 
'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eco_heating_system_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1f98ebd0-ac48-d802-7f62-000001200100_main_powerConsumptionReport_powerConsumption_energy_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_sac_ehs_000001_sub][sensor.eco_heating_system_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Eco Heating System Energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.eco_heating_system_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '8193.81', + }) +# --- +# name: test_all_entities[da_sac_ehs_000001_sub][sensor.eco_heating_system_energy_difference-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eco_heating_system_energy_difference', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy difference', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_difference', + 'unique_id': '1f98ebd0-ac48-d802-7f62-000001200100_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_sac_ehs_000001_sub][sensor.eco_heating_system_energy_difference-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Eco Heating System Energy difference', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.eco_heating_system_energy_difference', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_all_entities[da_sac_ehs_000001_sub][sensor.eco_heating_system_energy_saved-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eco_heating_system_energy_saved', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy saved', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_saved', + 'unique_id': 
'1f98ebd0-ac48-d802-7f62-000001200100_main_powerConsumptionReport_powerConsumption_energySaved_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_sac_ehs_000001_sub][sensor.eco_heating_system_energy_saved-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Eco Heating System Energy saved', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.eco_heating_system_energy_saved', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_all_entities[da_sac_ehs_000001_sub][sensor.eco_heating_system_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eco_heating_system_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1f98ebd0-ac48-d802-7f62-000001200100_main_powerConsumptionReport_powerConsumption_power_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_sac_ehs_000001_sub][sensor.eco_heating_system_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Eco Heating System Power', + 'power_consumption_end': '2025-03-09T11:14:57Z', + 'power_consumption_start': '2025-03-09T11:14:44Z', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.eco_heating_system_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.539', + }) +# --- +# name: test_all_entities[da_sac_ehs_000001_sub][sensor.eco_heating_system_power_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eco_heating_system_power_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power energy', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_energy', + 'unique_id': '1f98ebd0-ac48-d802-7f62-000001200100_main_powerConsumptionReport_powerConsumption_powerEnergy_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_sac_ehs_000001_sub][sensor.eco_heating_system_power_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Eco Heating System Power energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.eco_heating_system_power_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '9.4041739669111e-06', + }) +# --- 
+# name: test_all_entities[da_sac_ehs_000001_sub][sensor.eco_heating_system_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eco_heating_system_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1f98ebd0-ac48-d802-7f62-000001200100_main_temperatureMeasurement_temperature_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_sac_ehs_000001_sub][sensor.eco_heating_system_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Eco Heating System Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.eco_heating_system_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '54.3', + }) +# --- # name: test_all_entities[da_wm_dw_000001][sensor.dishwasher_completion_time-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2638,7 +4276,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'completion_time', - 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676.completionTime', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_dishwasherOperatingState_completionTime_completionTime', 'unit_of_measurement': None, }) # --- @@ -2691,7 +4329,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676.energy_meter', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_powerConsumptionReport_powerConsumption_energy_meter', 'unit_of_measurement': , }) # --- @@ -2746,7 +4384,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_difference', - 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676.deltaEnergy_meter', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', 'unit_of_measurement': , }) # --- @@ -2801,7 +4439,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_saved', - 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676.energySaved_meter', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_powerConsumptionReport_powerConsumption_energySaved_meter', 'unit_of_measurement': , }) # --- @@ -2864,7 +4502,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'dishwasher_job_state', - 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676.dishwasherJobState', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_dishwasherOperatingState_dishwasherJobState_dishwasherJobState', 'unit_of_measurement': None, }) # --- @@ -2930,7 +4568,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'dishwasher_machine_state', - 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676.machineState', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_dishwasherOperatingState_machineState_machineState', 'unit_of_measurement': None, }) # --- @@ -2988,7 +4626,7 @@ 
'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676.power_meter', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_powerConsumptionReport_powerConsumption_power_meter', 'unit_of_measurement': , }) # --- @@ -3045,7 +4683,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_energy', - 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676.powerEnergy_meter', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_powerConsumptionReport_powerConsumption_powerEnergy_meter', 'unit_of_measurement': , }) # --- @@ -3065,6 +4703,473 @@ 'state': '0.0', }) # --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_completion_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airdresser_completion_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Completion time', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'completion_time', + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_dryerOperatingState_completionTime_completionTime', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_completion_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'AirDresser Completion time', + }), + 'context': , + 'entity_id': 'sensor.airdresser_completion_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2025-02-11T09:00:17+00:00', + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airdresser_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_powerConsumptionReport_powerConsumption_energy_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'AirDresser Energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.airdresser_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '207.5', + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_energy_difference-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 
'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airdresser_energy_difference', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy difference', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_difference', + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_energy_difference-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'AirDresser Energy difference', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.airdresser_energy_difference', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_energy_saved-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airdresser_energy_saved', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy saved', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_saved', + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_powerConsumptionReport_powerConsumption_energySaved_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_energy_saved-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'AirDresser Energy saved', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.airdresser_energy_saved', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_job_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'cooling', + 'delay_wash', + 'drying', + 'finished', + 'none', + 'refreshing', + 'weight_sensing', + 'wrinkle_prevent', + 'dehumidifying', + 'ai_drying', + 'sanitizing', + 'internal_care', + 'freeze_protection', + 'continuous_dehumidifying', + 'thawing_frozen_inside', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airdresser_job_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Job state', + 'platform': 
'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dryer_job_state', + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_dryerOperatingState_dryerJobState_dryerJobState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_job_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'AirDresser Job state', + 'options': list([ + 'cooling', + 'delay_wash', + 'drying', + 'finished', + 'none', + 'refreshing', + 'weight_sensing', + 'wrinkle_prevent', + 'dehumidifying', + 'ai_drying', + 'sanitizing', + 'internal_care', + 'freeze_protection', + 'continuous_dehumidifying', + 'thawing_frozen_inside', + ]), + }), + 'context': , + 'entity_id': 'sensor.airdresser_job_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'none', + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_machine_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'pause', + 'run', + 'stop', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airdresser_machine_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Machine state', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dryer_machine_state', + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_dryerOperatingState_machineState_machineState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_machine_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'AirDresser Machine state', + 'options': list([ + 'pause', + 'run', + 'stop', + ]), + }), + 'context': , + 'entity_id': 'sensor.airdresser_machine_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'stop', + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airdresser_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_powerConsumptionReport_powerConsumption_power_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'AirDresser Power', + 'power_consumption_end': '2025-02-11T08:21:17Z', + 'power_consumption_start': '2025-02-10T22:51:59Z', + 
'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.airdresser_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_power_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airdresser_power_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power energy', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_energy', + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_powerConsumptionReport_powerConsumption_powerEnergy_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_power_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'AirDresser Power energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.airdresser_power_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- # name: test_all_entities[da_wm_wd_000001][sensor.dryer_completion_time-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -3095,7 +5200,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'completion_time', - 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b.completionTime', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_dryerOperatingState_completionTime_completionTime', 'unit_of_measurement': None, }) # --- @@ -3148,7 +5253,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b.energy_meter', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_powerConsumptionReport_powerConsumption_energy_meter', 'unit_of_measurement': , }) # --- @@ -3203,7 +5308,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_difference', - 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b.deltaEnergy_meter', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', 'unit_of_measurement': , }) # --- @@ -3258,7 +5363,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_saved', - 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b.energySaved_meter', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_powerConsumptionReport_powerConsumption_energySaved_meter', 'unit_of_measurement': , }) # --- @@ -3326,7 +5431,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'dryer_job_state', - 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b.dryerJobState', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_dryerOperatingState_dryerJobState_dryerJobState', 'unit_of_measurement': None, }) # --- @@ -3397,7 +5502,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'dryer_machine_state', - 'unique_id': 
'02f7256e-8353-5bdd-547f-bd5b1647e01b.machineState', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_dryerOperatingState_machineState_machineState', 'unit_of_measurement': None, }) # --- @@ -3455,7 +5560,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b.power_meter', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_powerConsumptionReport_powerConsumption_power_meter', 'unit_of_measurement': , }) # --- @@ -3512,7 +5617,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_energy', - 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b.powerEnergy_meter', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_powerConsumptionReport_powerConsumption_powerEnergy_meter', 'unit_of_measurement': , }) # --- @@ -3532,6 +5637,473 @@ 'state': '0.0', }) # --- +# name: test_all_entities[da_wm_wd_000001_1][sensor.seca_roupa_completion_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.seca_roupa_completion_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Completion time', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'completion_time', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_dryerOperatingState_completionTime_completionTime', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][sensor.seca_roupa_completion_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Seca-Roupa Completion time', + }), + 'context': , + 'entity_id': 'sensor.seca_roupa_completion_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2025-03-09T22:55:37+00:00', + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][sensor.seca_roupa_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.seca_roupa_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_powerConsumptionReport_powerConsumption_energy_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][sensor.seca_roupa_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Seca-Roupa Energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.seca_roupa_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '796.4', + 
}) +# --- +# name: test_all_entities[da_wm_wd_000001_1][sensor.seca_roupa_energy_difference-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.seca_roupa_energy_difference', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy difference', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_difference', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][sensor.seca_roupa_energy_difference-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Seca-Roupa Energy difference', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.seca_roupa_energy_difference', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][sensor.seca_roupa_energy_saved-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.seca_roupa_energy_saved', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy saved', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_saved', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_powerConsumptionReport_powerConsumption_energySaved_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][sensor.seca_roupa_energy_saved-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Seca-Roupa Energy saved', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.seca_roupa_energy_saved', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][sensor.seca_roupa_job_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'cooling', + 'delay_wash', + 'drying', + 'finished', + 'none', + 'refreshing', + 'weight_sensing', + 'wrinkle_prevent', + 'dehumidifying', + 'ai_drying', + 'sanitizing', + 'internal_care', + 'freeze_protection', + 'continuous_dehumidifying', + 'thawing_frozen_inside', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.seca_roupa_job_state', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Job state', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dryer_job_state', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_dryerOperatingState_dryerJobState_dryerJobState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][sensor.seca_roupa_job_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Seca-Roupa Job state', + 'options': list([ + 'cooling', + 'delay_wash', + 'drying', + 'finished', + 'none', + 'refreshing', + 'weight_sensing', + 'wrinkle_prevent', + 'dehumidifying', + 'ai_drying', + 'sanitizing', + 'internal_care', + 'freeze_protection', + 'continuous_dehumidifying', + 'thawing_frozen_inside', + ]), + }), + 'context': , + 'entity_id': 'sensor.seca_roupa_job_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'none', + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][sensor.seca_roupa_machine_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'pause', + 'run', + 'stop', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.seca_roupa_machine_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Machine state', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dryer_machine_state', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_dryerOperatingState_machineState_machineState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][sensor.seca_roupa_machine_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Seca-Roupa Machine state', + 'options': list([ + 'pause', + 'run', + 'stop', + ]), + }), + 'context': , + 'entity_id': 'sensor.seca_roupa_machine_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'stop', + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][sensor.seca_roupa_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.seca_roupa_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_powerConsumptionReport_powerConsumption_power_meter', + 'unit_of_measurement': , + }) +# --- +# name: 
test_all_entities[da_wm_wd_000001_1][sensor.seca_roupa_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Seca-Roupa Power', + 'power_consumption_end': '2025-03-09T19:47:37Z', + 'power_consumption_start': '2025-03-09T19:47:26Z', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.seca_roupa_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][sensor.seca_roupa_power_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.seca_roupa_power_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power energy', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_energy', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_powerConsumptionReport_powerConsumption_powerEnergy_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][sensor.seca_roupa_power_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Seca-Roupa Power energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.seca_roupa_power_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- # name: test_all_entities[da_wm_wm_000001][sensor.washer_completion_time-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -3562,7 +6134,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'completion_time', - 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47.completionTime', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_washerOperatingState_completionTime_completionTime', 'unit_of_measurement': None, }) # --- @@ -3615,7 +6187,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47.energy_meter', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_powerConsumptionReport_powerConsumption_energy_meter', 'unit_of_measurement': , }) # --- @@ -3670,7 +6242,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_difference', - 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47.deltaEnergy_meter', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', 'unit_of_measurement': , }) # --- @@ -3725,7 +6297,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_saved', - 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47.energySaved_meter', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_powerConsumptionReport_powerConsumption_energySaved_meter', 'unit_of_measurement': , }) # --- @@ -3794,7 +6366,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'washer_job_state', - 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47.washerJobState', + 'unique_id': 
'f984b91d-f250-9d42-3436-33f09a422a47_main_washerOperatingState_washerJobState_washerJobState', 'unit_of_measurement': None, }) # --- @@ -3866,7 +6438,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'washer_machine_state', - 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47.machineState', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_washerOperatingState_machineState_machineState', 'unit_of_measurement': None, }) # --- @@ -3924,7 +6496,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47.power_meter', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_powerConsumptionReport_powerConsumption_power_meter', 'unit_of_measurement': , }) # --- @@ -3981,7 +6553,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_energy', - 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47.powerEnergy_meter', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_powerConsumptionReport_powerConsumption_powerEnergy_meter', 'unit_of_measurement': , }) # --- @@ -4031,7 +6603,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'completion_time', - 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7.completionTime', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_washerOperatingState_completionTime_completionTime', 'unit_of_measurement': None, }) # --- @@ -4084,7 +6656,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7.energy_meter', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_powerConsumptionReport_powerConsumption_energy_meter', 'unit_of_measurement': , }) # --- @@ -4139,7 +6711,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_difference', - 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7.deltaEnergy_meter', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', 'unit_of_measurement': , }) # --- @@ -4194,7 +6766,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_saved', - 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7.energySaved_meter', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_powerConsumptionReport_powerConsumption_energySaved_meter', 'unit_of_measurement': , }) # --- @@ -4263,7 +6835,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'washer_job_state', - 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7.washerJobState', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_washerOperatingState_washerJobState_washerJobState', 'unit_of_measurement': None, }) # --- @@ -4335,7 +6907,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'washer_machine_state', - 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7.machineState', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_washerOperatingState_machineState_machineState', 'unit_of_measurement': None, }) # --- @@ -4393,7 +6965,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7.power_meter', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_powerConsumptionReport_powerConsumption_power_meter', 'unit_of_measurement': , }) # --- @@ -4450,7 +7022,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_energy', - 'unique_id': 
'63803fae-cbed-f356-a063-2cf148ae3ca7.powerEnergy_meter', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_powerConsumptionReport_powerConsumption_powerEnergy_meter', 'unit_of_measurement': , }) # --- @@ -4502,7 +7074,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'd5dc3299-c266-41c7-bd08-f540aea54b89.temperature', + 'unique_id': 'd5dc3299-c266-41c7-bd08-f540aea54b89_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -4554,7 +7126,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '028469cb-6e89-4f14-8d9a-bfbca5e0fbfc.humidity', + 'unique_id': '028469cb-6e89-4f14-8d9a-bfbca5e0fbfc_main_relativeHumidityMeasurement_humidity_humidity', 'unit_of_measurement': '%', }) # --- @@ -4606,7 +7178,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '028469cb-6e89-4f14-8d9a-bfbca5e0fbfc.temperature', + 'unique_id': '028469cb-6e89-4f14-8d9a-bfbca5e0fbfc_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -4626,6 +7198,109 @@ 'state': '22', }) # --- +# name: test_all_entities[ecobee_thermostat_offline][sensor.downstairs_humidity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.downstairs_humidity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Humidity', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1888b38f-6246-4f1e-911b-bfcfb66999db_main_relativeHumidityMeasurement_humidity_humidity', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[ecobee_thermostat_offline][sensor.downstairs_humidity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'humidity', + 'friendly_name': 'Downstairs Humidity', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.downstairs_humidity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_entities[ecobee_thermostat_offline][sensor.downstairs_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.downstairs_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1888b38f-6246-4f1e-911b-bfcfb66999db_main_temperatureMeasurement_temperature_temperature', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[ecobee_thermostat_offline][sensor.downstairs_temperature-state] + StateSnapshot({ + 'attributes': 
ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Downstairs Temperature', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.downstairs_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- # name: test_all_entities[generic_ef00_v1][sensor.thermostat_kuche_link_quality-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -4658,7 +7333,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'link_quality', - 'unique_id': '656569c2-7976-4232-a789-34b4d1176c3a.lqi', + 'unique_id': '656569c2-7976-4232-a789-34b4d1176c3a_main_signalStrength_lqi_lqi', 'unit_of_measurement': None, }) # --- @@ -4708,7 +7383,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '656569c2-7976-4232-a789-34b4d1176c3a.rssi', + 'unique_id': '656569c2-7976-4232-a789-34b4d1176c3a_main_signalStrength_rssi_rssi', 'unit_of_measurement': 'dBm', }) # --- @@ -4760,7 +7435,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '656569c2-7976-4232-a789-34b4d1176c3a.temperature', + 'unique_id': '656569c2-7976-4232-a789-34b4d1176c3a_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -4780,6 +7455,55 @@ 'state': '21.0', }) # --- +# name: test_all_entities[heatit_zpushwall][sensor.livingroom_smart_switch_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.livingroom_smart_switch_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '5e5b97f3-3094-44e6-abc0-f61283412d6a_main_battery_battery_battery', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[heatit_zpushwall][sensor.livingroom_smart_switch_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Livingroom smart switch Battery', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.livingroom_smart_switch_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- # name: test_all_entities[heatit_ztrm3_thermostat][sensor.hall_thermostat_energy-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -4812,7 +7536,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '69a271f6-6537-4982-8cd9-979866872692.energy', + 'unique_id': '69a271f6-6537-4982-8cd9-979866872692_main_energyMeter_energy_energy', 'unit_of_measurement': 'kWh', }) # --- @@ -4864,7 +7588,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '69a271f6-6537-4982-8cd9-979866872692.power', + 'unique_id': '69a271f6-6537-4982-8cd9-979866872692_main_powerMeter_power_power', 'unit_of_measurement': 'W', }) # --- @@ -4916,7 +7640,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '69a271f6-6537-4982-8cd9-979866872692.temperature', + 'unique_id': 
'69a271f6-6537-4982-8cd9-979866872692_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -4936,6 +7660,231 @@ 'state': '19.0', }) # --- +# name: test_all_entities[hw_q80r_soundbar][sensor.soundbar_media_input_source-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'wifi', + 'bluetooth', + 'hdmi1', + 'hdmi2', + 'digital', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.soundbar_media_input_source', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Media input source', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'media_input_source', + 'unique_id': 'afcf3b91-0000-1111-2222-ddff2a0a6577_main_mediaInputSource_inputSource_inputSource', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[hw_q80r_soundbar][sensor.soundbar_media_input_source-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Soundbar Media input source', + 'options': list([ + 'wifi', + 'bluetooth', + 'hdmi1', + 'hdmi2', + 'digital', + ]), + }), + 'context': , + 'entity_id': 'sensor.soundbar_media_input_source', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'wifi', + }) +# --- +# name: test_all_entities[hw_q80r_soundbar][sensor.soundbar_media_playback_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'paused', + 'playing', + 'stopped', + 'fast_forwarding', + 'rewinding', + 'buffering', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.soundbar_media_playback_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Media playback status', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'media_playback_status', + 'unique_id': 'afcf3b91-0000-1111-2222-ddff2a0a6577_main_mediaPlayback_playbackStatus_playbackStatus', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[hw_q80r_soundbar][sensor.soundbar_media_playback_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Soundbar Media playback status', + 'options': list([ + 'paused', + 'playing', + 'stopped', + 'fast_forwarding', + 'rewinding', + 'buffering', + ]), + }), + 'context': , + 'entity_id': 'sensor.soundbar_media_playback_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_all_entities[hw_q80r_soundbar][sensor.soundbar_volume-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.soundbar_volume', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Volume', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'audio_volume', + 'unique_id': 'afcf3b91-0000-1111-2222-ddff2a0a6577_main_audioVolume_volume_volume', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[hw_q80r_soundbar][sensor.soundbar_volume-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Soundbar Volume', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.soundbar_volume', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_all_entities[ikea_kadrilj][sensor.kitchen_ikea_kadrilj_window_blind_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.kitchen_ikea_kadrilj_window_blind_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '71afed1c-006d-4e48-b16e-e7f88f9fd638_main_battery_battery_battery', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[ikea_kadrilj][sensor.kitchen_ikea_kadrilj_window_blind_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Kitchen IKEA KADRILJ Window blind Battery', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.kitchen_ikea_kadrilj_window_blind_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '37', + }) +# --- # name: test_all_entities[im_speaker_ai_0001][sensor.galaxy_home_mini_media_input_source-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -4966,7 +7915,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'media_input_source', - 'unique_id': 'c9276e43-fe3c-88c3-1dcc-2eb79e292b8c.inputSource', + 'unique_id': 'c9276e43-fe3c-88c3-1dcc-2eb79e292b8c_main_mediaInputSource_inputSource_inputSource', 'unit_of_measurement': None, }) # --- @@ -5014,7 +7963,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'media_playback_repeat', - 'unique_id': 'c9276e43-fe3c-88c3-1dcc-2eb79e292b8c.playbackRepeatMode', + 'unique_id': 'c9276e43-fe3c-88c3-1dcc-2eb79e292b8c_main_mediaPlaybackRepeat_playbackRepeatMode_playbackRepeatMode', 'unit_of_measurement': None, }) # --- @@ -5061,7 +8010,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'media_playback_shuffle', - 'unique_id': 'c9276e43-fe3c-88c3-1dcc-2eb79e292b8c.playbackShuffle', + 'unique_id': 'c9276e43-fe3c-88c3-1dcc-2eb79e292b8c_main_mediaPlaybackShuffle_playbackShuffle_playbackShuffle', 'unit_of_measurement': None, }) # --- @@ -5117,7 +8066,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'media_playback_status', - 'unique_id': 'c9276e43-fe3c-88c3-1dcc-2eb79e292b8c.playbackStatus', + 'unique_id': 
'c9276e43-fe3c-88c3-1dcc-2eb79e292b8c_main_mediaPlayback_playbackStatus_playbackStatus', 'unit_of_measurement': None, }) # --- @@ -5173,7 +8122,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'audio_volume', - 'unique_id': 'c9276e43-fe3c-88c3-1dcc-2eb79e292b8c.volume', + 'unique_id': 'c9276e43-fe3c-88c3-1dcc-2eb79e292b8c_main_audioVolume_volume_volume', 'unit_of_measurement': '%', }) # --- @@ -5221,7 +8170,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c.battery', + 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c_main_battery_battery_battery', 'unit_of_measurement': '%', }) # --- @@ -5272,7 +8221,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c.temperature', + 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -5322,7 +8271,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'x_coordinate', - 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c X Coordinate', + 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c_main_threeAxis_threeAxis_x_coordinate', 'unit_of_measurement': None, }) # --- @@ -5369,7 +8318,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'y_coordinate', - 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c Y Coordinate', + 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c_main_threeAxis_threeAxis_y_coordinate', 'unit_of_measurement': None, }) # --- @@ -5416,7 +8365,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'z_coordinate', - 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c Z Coordinate', + 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c_main_threeAxis_threeAxis_z_coordinate', 'unit_of_measurement': None, }) # --- @@ -5463,7 +8412,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'air_conditioner_mode', - 'unique_id': 'bf4b1167-48a3-4af7-9186-0900a678ffa5.airConditionerMode', + 'unique_id': 'bf4b1167-48a3-4af7-9186-0900a678ffa5_main_airConditionerMode_airConditionerMode_airConditionerMode', 'unit_of_measurement': None, }) # --- @@ -5510,7 +8459,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'thermostat_cooling_setpoint', - 'unique_id': 'bf4b1167-48a3-4af7-9186-0900a678ffa5.coolingSetpoint', + 'unique_id': 'bf4b1167-48a3-4af7-9186-0900a678ffa5_main_thermostatCoolingSetpoint_coolingSetpoint_coolingSetpoint', 'unit_of_measurement': , }) # --- @@ -5568,7 +8517,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'media_playback_status', - 'unique_id': 'c85fced9-c474-4a47-93c2-037cc7829536.playbackStatus', + 'unique_id': 'c85fced9-c474-4a47-93c2-037cc7829536_main_mediaPlayback_playbackStatus_playbackStatus', 'unit_of_measurement': None, }) # --- @@ -5624,7 +8573,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'audio_volume', - 'unique_id': 'c85fced9-c474-4a47-93c2-037cc7829536.volume', + 'unique_id': 'c85fced9-c474-4a47-93c2-037cc7829536_main_audioVolume_volume_volume', 'unit_of_measurement': '%', }) # --- @@ -5642,6 +8591,116 @@ 'state': '15', }) # --- +# name: test_all_entities[tplink_p110][sensor.spulmaschine_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 
'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.spulmaschine_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '6602696a-1e48-49e4-919f-69406f5b5da1_main_powerConsumptionReport_powerConsumption_energy_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[tplink_p110][sensor.spulmaschine_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Spülmaschine Energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.spulmaschine_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15.72', + }) +# --- +# name: test_all_entities[tplink_p110][sensor.spulmaschine_energy_difference-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.spulmaschine_energy_difference', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy difference', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_difference', + 'unique_id': '6602696a-1e48-49e4-919f-69406f5b5da1_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[tplink_p110][sensor.spulmaschine_energy_difference-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Spülmaschine Energy difference', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.spulmaschine_energy_difference', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- # name: test_all_entities[vd_network_audio_002s][sensor.soundbar_living_media_playback_status-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -5681,7 +8740,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'media_playback_status', - 'unique_id': '0d94e5db-8501-2355-eb4f-214163702cac.playbackStatus', + 'unique_id': '0d94e5db-8501-2355-eb4f-214163702cac_main_mediaPlayback_playbackStatus_playbackStatus', 'unit_of_measurement': None, }) # --- @@ -5737,7 +8796,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'audio_volume', - 'unique_id': '0d94e5db-8501-2355-eb4f-214163702cac.volume', + 'unique_id': '0d94e5db-8501-2355-eb4f-214163702cac_main_audioVolume_volume_volume', 'unit_of_measurement': '%', }) # --- @@ -5755,6 +8814,57 @@ 'state': '17', }) # --- +# name: test_all_entities[vd_sensor_light_2023][sensor.light_sensor_55_the_frame_brightness_intensity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), 
+ 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.light_sensor_55_the_frame_brightness_intensity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Brightness intensity', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'brightness_intensity', + 'unique_id': '5cc1c096-98b9-460c-8f1c-1045509ec605_main_relativeBrightness_brightnessIntensity_brightnessIntensity', + 'unit_of_measurement': 'level', + }) +# --- +# name: test_all_entities[vd_sensor_light_2023][sensor.light_sensor_55_the_frame_brightness_intensity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Light Sensor - 55" The Frame Brightness intensity', + 'state_class': , + 'unit_of_measurement': 'level', + }), + 'context': , + 'entity_id': 'sensor.light_sensor_55_the_frame_brightness_intensity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- # name: test_all_entities[vd_stv_2017_k][sensor.tv_samsung_8_series_49_media_input_source-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -5792,7 +8902,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'media_input_source', - 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1.inputSource', + 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1_main_mediaInputSource_inputSource_inputSource', 'unit_of_measurement': None, }) # --- @@ -5855,7 +8965,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'media_playback_status', - 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1.playbackStatus', + 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1_main_mediaPlayback_playbackStatus_playbackStatus', 'unit_of_measurement': None, }) # --- @@ -5911,7 +9021,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'tv_channel', - 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1.tvChannel', + 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1_main_tvChannel_tvChannel_tvChannel', 'unit_of_measurement': None, }) # --- @@ -5958,7 +9068,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'tv_channel_name', - 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1.tvChannelName', + 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1_main_tvChannel_tvChannelName_tvChannelName', 'unit_of_measurement': None, }) # --- @@ -6005,7 +9115,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'audio_volume', - 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1.volume', + 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1_main_audioVolume_volume_volume', 'unit_of_measurement': '%', }) # --- @@ -6053,7 +9163,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '2894dc93-0f11-49cc-8a81-3a684cebebf6.battery', + 'unique_id': '2894dc93-0f11-49cc-8a81-3a684cebebf6_main_battery_battery_battery', 'unit_of_measurement': '%', }) # --- @@ -6104,7 +9214,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '2894dc93-0f11-49cc-8a81-3a684cebebf6.temperature', + 'unique_id': 
'2894dc93-0f11-49cc-8a81-3a684cebebf6_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -6154,7 +9264,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'a2a6018b-2663-4727-9d1d-8f56953b5116.battery', + 'unique_id': 'a2a6018b-2663-4727-9d1d-8f56953b5116_main_battery_battery_battery', 'unit_of_measurement': '%', }) # --- @@ -6203,7 +9313,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'a9f587c5-5d8b-4273-8907-e7f609af5158.battery', + 'unique_id': 'a9f587c5-5d8b-4273-8907-e7f609af5158_main_battery_battery_battery', 'unit_of_measurement': '%', }) # --- diff --git a/tests/components/smartthings/snapshots/test_switch.ambr b/tests/components/smartthings/snapshots/test_switch.ambr index 81b73874a6a..8c95d2f20fc 100644 --- a/tests/components/smartthings/snapshots/test_switch.ambr +++ b/tests/components/smartthings/snapshots/test_switch.ambr @@ -29,7 +29,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '10e06a70-ee7d-4832-85e9-a0a06a7a05bd', + 'unique_id': '10e06a70-ee7d-4832-85e9-a0a06a7a05bd_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -46,6 +46,53 @@ 'state': 'on', }) # --- +# name: test_all_entities[da_ks_cooktop_31001][switch.induction_hob-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.induction_hob', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '808dbd84-f357-47e2-a0cd-3b66fa22d584_main_switch_switch_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_cooktop_31001][switch.induction_hob-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Induction Hob', + }), + 'context': , + 'entity_id': 'switch.induction_hob', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_all_entities[da_ks_microwave_0101x][switch.microwave-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -76,7 +123,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a', + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -93,6 +140,53 @@ 'state': 'off', }) # --- +# name: test_all_entities[da_ref_normal_000001][switch.refrigerator_ice_maker-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.refrigerator_ice_maker', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Ice maker', + 'platform': 'smartthings', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ice_maker', + 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09_icemaker_switch_switch_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ref_normal_000001][switch.refrigerator_ice_maker-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Refrigerator Ice maker', + }), + 'context': , + 'entity_id': 'switch.refrigerator_ice_maker', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_all_entities[da_rvc_normal_000001][switch.robot_vacuum-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -123,7 +217,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '3442dfc6-17c0-a65f-dae0-4c6e01786f44', + 'unique_id': '3442dfc6-17c0-a65f-dae0-4c6e01786f44_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -140,6 +234,53 @@ 'state': 'off', }) # --- +# name: test_all_entities[da_sac_ehs_000001_sub][switch.eco_heating_system-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.eco_heating_system', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1f98ebd0-ac48-d802-7f62-000001200100_main_switch_switch_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_sac_ehs_000001_sub][switch.eco_heating_system-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Eco Heating System', + }), + 'context': , + 'entity_id': 'switch.eco_heating_system', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_all_entities[da_wm_dw_000001][switch.dishwasher-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -170,7 +311,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -187,6 +328,53 @@ 'state': 'off', }) # --- +# name: test_all_entities[da_wm_sc_000001][switch.airdresser-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.airdresser', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_switch_switch_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_sc_000001][switch.airdresser-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'friendly_name': 'AirDresser', + }), + 'context': , + 'entity_id': 'switch.airdresser', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_all_entities[da_wm_wd_000001][switch.dryer-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -217,7 +405,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -234,6 +422,147 @@ 'state': 'off', }) # --- +# name: test_all_entities[da_wm_wd_000001][switch.dryer_wrinkle_prevent-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.dryer_wrinkle_prevent', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Wrinkle prevent', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wrinkle_prevent', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_custom.dryerWrinklePrevent_dryerWrinklePrevent_dryerWrinklePrevent', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wd_000001][switch.dryer_wrinkle_prevent-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dryer Wrinkle prevent', + }), + 'context': , + 'entity_id': 'switch.dryer_wrinkle_prevent', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][switch.seca_roupa-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.seca_roupa', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_switch_switch_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][switch.seca_roupa-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Seca-Roupa', + }), + 'context': , + 'entity_id': 'switch.seca_roupa', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][switch.seca_roupa_wrinkle_prevent-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.seca_roupa_wrinkle_prevent', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': None, + 'original_name': 'Wrinkle prevent', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wrinkle_prevent', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_custom.dryerWrinklePrevent_dryerWrinklePrevent_dryerWrinklePrevent', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][switch.seca_roupa_wrinkle_prevent-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Seca-Roupa Wrinkle prevent', + }), + 'context': , + 'entity_id': 'switch.seca_roupa_wrinkle_prevent', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_all_entities[da_wm_wm_000001][switch.washer-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -264,7 +593,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -311,7 +640,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -328,6 +657,53 @@ 'state': 'on', }) # --- +# name: test_all_entities[da_wm_wm_000001_1][switch.washing_machine_bubble_soak-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.washing_machine_bubble_soak', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Bubble Soak', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'bubble_soak', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_samsungce.washerBubbleSoak_status_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wm_000001_1][switch.washing_machine_bubble_soak-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Washing Machine Bubble Soak', + }), + 'context': , + 'entity_id': 'switch.washing_machine_bubble_soak', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_all_entities[generic_ef00_v1][switch.thermostat_kuche-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -358,7 +734,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '656569c2-7976-4232-a789-34b4d1176c3a', + 'unique_id': '656569c2-7976-4232-a789-34b4d1176c3a_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -375,6 +751,53 @@ 'state': 'off', }) # --- +# name: test_all_entities[hw_q80r_soundbar][switch.soundbar-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.soundbar', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': 
None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'afcf3b91-0000-1111-2222-ddff2a0a6577_main_switch_switch_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[hw_q80r_soundbar][switch.soundbar-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Soundbar', + }), + 'context': , + 'entity_id': 'switch.soundbar', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- # name: test_all_entities[sensibo_airconditioner_1][switch.office-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -405,7 +828,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'bf4b1167-48a3-4af7-9186-0900a678ffa5', + 'unique_id': 'bf4b1167-48a3-4af7-9186-0900a678ffa5_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -452,7 +875,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '550a1c72-65a0-4d55-b97b-75168e055398', + 'unique_id': '550a1c72-65a0-4d55-b97b-75168e055398_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -469,6 +892,53 @@ 'state': 'on', }) # --- +# name: test_all_entities[tplink_p110][switch.spulmaschine-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.spulmaschine', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '6602696a-1e48-49e4-919f-69406f5b5da1_main_switch_switch_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[tplink_p110][switch.spulmaschine-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Spülmaschine', + }), + 'context': , + 'entity_id': 'switch.spulmaschine', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- # name: test_all_entities[vd_network_audio_002s][switch.soundbar_living-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -499,7 +969,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '0d94e5db-8501-2355-eb4f-214163702cac', + 'unique_id': '0d94e5db-8501-2355-eb4f-214163702cac_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -516,6 +986,53 @@ 'state': 'on', }) # --- +# name: test_all_entities[vd_sensor_light_2023][switch.light_sensor_55_the_frame-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.light_sensor_55_the_frame', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 
'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '5cc1c096-98b9-460c-8f1c-1045509ec605_main_switch_switch_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[vd_sensor_light_2023][switch.light_sensor_55_the_frame-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Light Sensor - 55" The Frame', + }), + 'context': , + 'entity_id': 'switch.light_sensor_55_the_frame', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_all_entities[vd_stv_2017_k][switch.tv_samsung_8_series_49-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -546,7 +1063,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1', + 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/smartthings/snapshots/test_update.ambr b/tests/components/smartthings/snapshots/test_update.ambr new file mode 100644 index 00000000000..c27a0b9f5fc --- /dev/null +++ b/tests/components/smartthings/snapshots/test_update.ambr @@ -0,0 +1,421 @@ +# serializer version: 1 +# name: test_all_entities[bosch_radiator_thermostat_ii][update.radiator_thermostat_ii_m_wohnzimmer_firmware-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.radiator_thermostat_ii_m_wohnzimmer_firmware', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Firmware', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '286ba274-4093-4bcb-849c-a1a3efe7b1e5_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[bosch_radiator_thermostat_ii][update.radiator_thermostat_ii_m_wohnzimmer_firmware-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/smartthings/icon.png', + 'friendly_name': 'Radiator Thermostat II [+M] Wohnzimmer Firmware', + 'in_progress': False, + 'installed_version': '2.00.09 (20009)', + 'latest_version': '2.00.09 (20009)', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.radiator_thermostat_ii_m_wohnzimmer_firmware', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[centralite][update.dimmer_debian_firmware-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.dimmer_debian_firmware', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 
'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Firmware', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'd0268a69-abfb-4c92-a646-61cec2e510ad_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[centralite][update.dimmer_debian_firmware-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/smartthings/icon.png', + 'friendly_name': 'Dimmer Debian Firmware', + 'in_progress': False, + 'installed_version': '16015010', + 'latest_version': '16015010', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.dimmer_debian_firmware', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[contact_sensor][update.front_door_open_closed_sensor_firmware-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.front_door_open_closed_sensor_firmware', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Firmware', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '2d9a892b-1c93-45a5-84cb-0e81889498c6_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[contact_sensor][update.front_door_open_closed_sensor_firmware-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/smartthings/icon.png', + 'friendly_name': '.Front Door Open/Closed Sensor Firmware', + 'in_progress': False, + 'installed_version': '00000103', + 'latest_version': '00000104', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.front_door_open_closed_sensor_firmware', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[ikea_kadrilj][update.kitchen_ikea_kadrilj_window_blind_firmware-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.kitchen_ikea_kadrilj_window_blind_firmware', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Firmware', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '71afed1c-006d-4e48-b16e-e7f88f9fd638_main', + 'unit_of_measurement': None, + }) +# 
--- +# name: test_all_entities[ikea_kadrilj][update.kitchen_ikea_kadrilj_window_blind_firmware-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/smartthings/icon.png', + 'friendly_name': 'Kitchen IKEA KADRILJ Window blind Firmware', + 'in_progress': False, + 'installed_version': '22007631', + 'latest_version': '22007631', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.kitchen_ikea_kadrilj_window_blind_firmware', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[multipurpose_sensor][update.deck_door_firmware-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.deck_door_firmware', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Firmware', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[multipurpose_sensor][update.deck_door_firmware-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/smartthings/icon.png', + 'friendly_name': 'Deck Door Firmware', + 'in_progress': False, + 'installed_version': '0000001B', + 'latest_version': '0000001B', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.deck_door_firmware', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[smart_plug][update.arlo_beta_basestation_firmware-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.arlo_beta_basestation_firmware', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Firmware', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '550a1c72-65a0-4d55-b97b-75168e055398_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[smart_plug][update.arlo_beta_basestation_firmware-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/smartthings/icon.png', + 'friendly_name': 'Arlo Beta Basestation Firmware', + 'in_progress': False, + 
'installed_version': '00102101', + 'latest_version': '00102101', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.arlo_beta_basestation_firmware', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[yale_push_button_deadbolt_lock][update.basement_door_lock_firmware-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.basement_door_lock_firmware', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Firmware', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'a9f587c5-5d8b-4273-8907-e7f609af5158_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[yale_push_button_deadbolt_lock][update.basement_door_lock_firmware-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/smartthings/icon.png', + 'friendly_name': 'Basement Door Lock Firmware', + 'in_progress': False, + 'installed_version': '00840847', + 'latest_version': '00840847', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.basement_door_lock_firmware', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/smartthings/snapshots/test_valve.ambr b/tests/components/smartthings/snapshots/test_valve.ambr new file mode 100644 index 00000000000..f82155c8499 --- /dev/null +++ b/tests/components/smartthings/snapshots/test_valve.ambr @@ -0,0 +1,50 @@ +# serializer version: 1 +# name: test_all_entities[virtual_valve][valve.volvo-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'valve', + 'entity_category': None, + 'entity_id': 'valve.volvo', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '612ab3c2-3bb0-48f7-b2c0-15b169cb2fc3_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[virtual_valve][valve.volvo-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'water', + 'friendly_name': 'volvo', + 'supported_features': , + }), + 'context': , + 'entity_id': 'valve.volvo', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'closed', + }) +# --- diff --git a/tests/components/smartthings/test_binary_sensor.py b/tests/components/smartthings/test_binary_sensor.py index 
f46be2edc89..517de034613 100644 --- a/tests/components/smartthings/test_binary_sensor.py +++ b/tests/components/smartthings/test_binary_sensor.py @@ -6,9 +6,14 @@ from pysmartthings import Attribute, Capability import pytest from syrupy import SnapshotAssertion +from homeassistant.components import automation, script +from homeassistant.components.automation import automations_with_entity +from homeassistant.components.script import scripts_with_entity +from homeassistant.components.smartthings import DOMAIN from homeassistant.const import STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import entity_registry as er, issue_registry as ir +from homeassistant.setup import async_setup_component from . import setup_integration, snapshot_smartthings_entities, trigger_update @@ -51,3 +56,72 @@ async def test_state_update( ) assert hass.states.get("binary_sensor.refrigerator_door").state == STATE_ON + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize( + ("device_fixture", "issue_string", "entity_id"), + [ + ("virtual_valve", "valve", "binary_sensor.volvo_valve"), + ("da_ref_normal_000001", "fridge_door", "binary_sensor.refrigerator_door"), + ], +) +async def test_create_issue( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + issue_registry: ir.IssueRegistry, + issue_string: str, + entity_id: str, +) -> None: + """Test we create an issue when an automation or script is using a deprecated entity.""" + issue_id = f"deprecated_binary_{issue_string}_{entity_id}" + + assert await async_setup_component( + hass, + automation.DOMAIN, + { + automation.DOMAIN: { + "alias": "test", + "trigger": {"platform": "state", "entity_id": entity_id}, + "action": { + "action": "automation.turn_on", + "target": { + "entity_id": "automation.test", + }, + }, + } + }, + ) + assert await async_setup_component( + hass, + script.DOMAIN, + { + script.DOMAIN: { + "test": { + "sequence": [ + { + "condition": "state", + "entity_id": entity_id, + "state": "on", + }, + ], + } + } + }, + ) + + await setup_integration(hass, mock_config_entry) + + assert automations_with_entity(hass, entity_id)[0] == "automation.test" + assert scripts_with_entity(hass, entity_id)[0] == "script.test" + + assert len(issue_registry.issues) == 1 + assert issue_registry.async_get_issue(DOMAIN, issue_id) + + await hass.config_entries.async_unload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + # Assert the issue is no longer present + assert not issue_registry.async_get_issue(DOMAIN, issue_id) + assert len(issue_registry.issues) == 0 diff --git a/tests/components/smartthings/test_button.py b/tests/components/smartthings/test_button.py new file mode 100644 index 00000000000..4a348d079ca --- /dev/null +++ b/tests/components/smartthings/test_button.py @@ -0,0 +1,56 @@ +"""Test for the SmartThings button platform.""" + +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory +from pysmartthings import Capability, Command +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.smartthings import MAIN +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration, snapshot_smartthings_entities + +from tests.common import MockConfigEntry + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + await setup_integration(hass, mock_config_entry) + + snapshot_smartthings_entities(hass, entity_registry, snapshot, Platform.BUTTON) + + +@pytest.mark.parametrize("device_fixture", ["da_ks_microwave_0101x"]) +async def test_press( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test state update.""" + await setup_integration(hass, mock_config_entry) + freezer.move_to("2023-10-21") + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.microwave_stop"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "2bad3237-4886-e699-1b90-4a51a3d55c8a", + Capability.OVEN_OPERATING_STATE, + Command.STOP, + MAIN, + ) diff --git a/tests/components/smartthings/test_config_flow.py b/tests/components/smartthings/test_config_flow.py index 7472d7d6b71..d6e8ef03290 100644 --- a/tests/components/smartthings/test_config_flow.py +++ b/tests/components/smartthings/test_config_flow.py @@ -10,6 +10,7 @@ from homeassistant.components.smartthings.const import ( CONF_INSTALLED_APP_ID, CONF_LOCATION_ID, CONF_REFRESH_TOKEN, + CONF_SUBSCRIPTION_ID, DOMAIN, ) from homeassistant.config_entries import SOURCE_USER, ConfigEntryState @@ -508,10 +509,11 @@ async def test_migration( "installed_app_id": "123123123-2be1-4e40-b257-e4ef59083324", }, CONF_LOCATION_ID: "397678e5-9995-4a39-9d9f-ae6ba310236c", + CONF_SUBSCRIPTION_ID: "f5768ce8-c9e5-4507-9020-912c0c60e0ab", } assert mock_old_config_entry.unique_id == "397678e5-9995-4a39-9d9f-ae6ba310236c" assert mock_old_config_entry.version == 3 - assert mock_old_config_entry.minor_version == 1 + assert mock_old_config_entry.minor_version == 2 @pytest.mark.usefixtures("current_request_with_host", "use_cloud") @@ -584,7 +586,7 @@ async def test_migration_wrong_location( == "appid123-2be1-4e40-b257-e4ef59083324_397678e5-9995-4a39-9d9f-ae6ba310236c" ) assert mock_old_config_entry.version == 3 - assert mock_old_config_entry.minor_version == 1 + assert mock_old_config_entry.minor_version == 2 @pytest.mark.usefixtures("current_request_with_host") diff --git a/tests/components/smartthings/test_diagnostics.py b/tests/components/smartthings/test_diagnostics.py index f486c19de14..b28a3a1aff5 100644 --- a/tests/components/smartthings/test_diagnostics.py +++ b/tests/components/smartthings/test_diagnostics.py @@ -30,9 +30,9 @@ async def test_config_entry_diagnostics( snapshot: SnapshotAssertion, ) -> None: """Test generating diagnostics for a device entry.""" - mock_smartthings.get_raw_devices.return_value = load_json_object_fixture( - "devices/da_ac_rac_000001.json", DOMAIN - ) + mock_smartthings.get_raw_devices.return_value = [ + load_json_object_fixture("devices/da_ac_rac_000001.json", DOMAIN) + ] await setup_integration(hass, mock_config_entry) assert ( await get_diagnostics_for_config_entry(hass, hass_client, mock_config_entry) diff --git a/tests/components/smartthings/test_event.py b/tests/components/smartthings/test_event.py new file mode 100644 index 00000000000..bdca7674981 --- /dev/null +++ b/tests/components/smartthings/test_event.py @@ -0,0 +1,61 @@ +"""Test for the SmartThings event platform.""" + +from 
unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory +from pysmartthings import Attribute, Capability +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import STATE_UNKNOWN, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration, snapshot_smartthings_entities, trigger_update + +from tests.common import MockConfigEntry + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + await setup_integration(hass, mock_config_entry) + + snapshot_smartthings_entities(hass, entity_registry, snapshot, Platform.EVENT) + + +@pytest.mark.parametrize("device_fixture", ["heatit_zpushwall"]) +async def test_state_update( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test state update.""" + await setup_integration(hass, mock_config_entry) + + freezer.move_to("2023-10-21") + + assert ( + hass.states.get("event.livingroom_smart_switch_button1").state == STATE_UNKNOWN + ) + + await trigger_update( + hass, + devices, + "5e5b97f3-3094-44e6-abc0-f61283412d6a", + Capability.BUTTON, + Attribute.BUTTON, + "pushed", + component="button1", + ) + + assert ( + hass.states.get("event.livingroom_smart_switch_button1").state + == "2023-10-21T00:00:00.000+00:00" + ) diff --git a/tests/components/smartthings/test_init.py b/tests/components/smartthings/test_init.py index e3d865fc5c8..991f44e4377 100644 --- a/tests/components/smartthings/test_init.py +++ b/tests/components/smartthings/test_init.py @@ -1,15 +1,40 @@ """Tests for the SmartThings component init module.""" -from unittest.mock import AsyncMock +from unittest.mock import AsyncMock, patch -from pysmartthings import Attribute, Capability, DeviceResponse, DeviceStatus +from aiohttp import ClientResponseError, RequestInfo +from pysmartthings import ( + Attribute, + Capability, + DeviceResponse, + DeviceStatus, + Lifecycle, + SmartThingsSinkError, + Subscription, +) import pytest from syrupy import SnapshotAssertion +from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN +from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN, HVACMode +from homeassistant.components.cover import DOMAIN as COVER_DOMAIN +from homeassistant.components.fan import DOMAIN as FAN_DOMAIN +from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN +from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.components.smartthings import EVENT_BUTTON -from homeassistant.components.smartthings.const import DOMAIN +from homeassistant.components.smartthings.const import ( + CONF_INSTALLED_APP_ID, + CONF_LOCATION_ID, + CONF_SUBSCRIPTION_ID, + DOMAIN, + SCOPES, +) +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.core import Event, HomeAssistant -from homeassistant.helpers import device_registry as dr +from homeassistant.helpers import device_registry as dr, entity_registry as er from . 
import setup_integration, trigger_update @@ -63,6 +88,178 @@ async def test_button_event( assert events[0] == snapshot +@pytest.mark.parametrize("device_fixture", ["da_ac_rac_000001"]) +async def test_create_subscription( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test creating a subscription.""" + assert CONF_SUBSCRIPTION_ID not in mock_config_entry.data + + await setup_integration(hass, mock_config_entry) + + devices.create_subscription.assert_called_once() + + assert ( + mock_config_entry.data[CONF_SUBSCRIPTION_ID] + == "f5768ce8-c9e5-4507-9020-912c0c60e0ab" + ) + + devices.subscribe.assert_called_once_with( + "397678e5-9995-4a39-9d9f-ae6ba310236c", + "5aaaa925-2be1-4e40-b257-e4ef59083324", + Subscription.from_json(load_fixture("subscription.json", DOMAIN)), + ) + + +@pytest.mark.parametrize("device_fixture", ["da_ac_rac_000001"]) +async def test_create_subscription_sink_error( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test handling an error when creating a subscription.""" + assert CONF_SUBSCRIPTION_ID not in mock_config_entry.data + + devices.create_subscription.side_effect = SmartThingsSinkError("Sink error") + + await setup_integration(hass, mock_config_entry) + + devices.subscribe.assert_not_called() + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + assert CONF_SUBSCRIPTION_ID not in mock_config_entry.data + + +@pytest.mark.parametrize("device_fixture", ["da_ac_rac_000001"]) +async def test_update_subscription_identifier( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test updating the subscription identifier.""" + await setup_integration(hass, mock_config_entry) + + assert ( + mock_config_entry.data[CONF_SUBSCRIPTION_ID] + == "f5768ce8-c9e5-4507-9020-912c0c60e0ab" + ) + + devices.new_subscription_id_callback("abc") + + await hass.async_block_till_done() + + assert mock_config_entry.data[CONF_SUBSCRIPTION_ID] == "abc" + + +@pytest.mark.parametrize("device_fixture", ["da_ac_rac_000001"]) +async def test_stale_subscription_id( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test updating the subscription identifier.""" + mock_config_entry.add_to_hass(hass) + + hass.config_entries.async_update_entry( + mock_config_entry, + data={**mock_config_entry.data, CONF_SUBSCRIPTION_ID: "test"}, + ) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert ( + mock_config_entry.data[CONF_SUBSCRIPTION_ID] + == "f5768ce8-c9e5-4507-9020-912c0c60e0ab" + ) + devices.delete_subscription.assert_called_once_with("test") + + +@pytest.mark.parametrize("device_fixture", ["da_ac_rac_000001"]) +async def test_remove_subscription_identifier( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test removing the subscription identifier.""" + await setup_integration(hass, mock_config_entry) + + assert ( + mock_config_entry.data[CONF_SUBSCRIPTION_ID] + == "f5768ce8-c9e5-4507-9020-912c0c60e0ab" + ) + + devices.new_subscription_id_callback(None) + + await hass.async_block_till_done() + + assert mock_config_entry.data[CONF_SUBSCRIPTION_ID] is None + + +@pytest.mark.parametrize("device_fixture", ["da_ac_rac_000001"]) +async def test_max_connections_handling( + hass: HomeAssistant, devices: AsyncMock, mock_config_entry: MockConfigEntry +) -> None: 
+ """Test handling reaching max connections.""" + await setup_integration(hass, mock_config_entry) + + assert ( + mock_config_entry.data[CONF_SUBSCRIPTION_ID] + == "f5768ce8-c9e5-4507-9020-912c0c60e0ab" + ) + + devices.create_subscription.side_effect = SmartThingsSinkError("Sink error") + + devices.max_connections_reached_callback() + + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + +@pytest.mark.parametrize("device_fixture", ["da_ac_rac_000001"]) +async def test_unloading( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test unloading the integration.""" + await setup_integration(hass, mock_config_entry) + + await hass.config_entries.async_unload(mock_config_entry.entry_id) + devices.delete_subscription.assert_called_once_with( + "f5768ce8-c9e5-4507-9020-912c0c60e0ab" + ) + # Deleting the subscription automatically deletes the subscription ID + devices.new_subscription_id_callback(None) + + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED + assert mock_config_entry.data[CONF_SUBSCRIPTION_ID] is None + + +@pytest.mark.parametrize("device_fixture", ["da_ac_rac_000001"]) +async def test_shutdown( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test shutting down Home Assistant.""" + await setup_integration(hass, mock_config_entry) + + hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) + devices.delete_subscription.assert_called_once_with( + "f5768ce8-c9e5-4507-9020-912c0c60e0ab" + ) + # Deleting the subscription automatically deletes the subscription ID + devices.new_subscription_id_callback(None) + + assert mock_config_entry.state is ConfigEntryState.LOADED + assert mock_config_entry.data[CONF_SUBSCRIPTION_ID] is None + + @pytest.mark.parametrize("device_fixture", ["da_ac_rac_000001"]) async def test_removing_stale_devices( hass: HomeAssistant, @@ -83,6 +280,57 @@ async def test_removing_stale_devices( assert not device_registry.async_get_device({(DOMAIN, "aaa-bbb-ccc")}) +@pytest.mark.parametrize("device_fixture", ["da_ac_rac_000001"]) +async def test_refreshing_expired_token( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test removing stale devices.""" + with patch( + "homeassistant.components.smartthings.OAuth2Session.async_ensure_token_valid", + side_effect=ClientResponseError( + request_info=RequestInfo( + url="http://example.com", + method="GET", + headers={}, + real_url="http://example.com", + ), + status=400, + history=(), + ), + ): + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + assert len(hass.config_entries.flow.async_progress()) == 1 + + +@pytest.mark.parametrize("device_fixture", ["da_ac_rac_000001"]) +async def test_error_refreshing_token( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test removing stale devices.""" + with patch( + "homeassistant.components.smartthings.OAuth2Session.async_ensure_token_valid", + side_effect=ClientResponseError( + request_info=RequestInfo( + url="http://example.com", + method="GET", + headers={}, + real_url="http://example.com", + ), + status=500, + history=(), + ), + ): + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + async def test_hub_via_device( hass: HomeAssistant, snapshot: SnapshotAssertion, @@ -112,3 +360,276 @@ async def 
test_hub_via_device( ).via_device_id == hub_device.id ) + + +@pytest.mark.parametrize("device_fixture", ["da_ac_rac_000001"]) +async def test_deleted_device_runtime( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test devices that are deleted in runtime.""" + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("climate.ac_office_granit").state == HVACMode.OFF + + for call in devices.add_device_lifecycle_event_listener.call_args_list: + if call[0][0] == Lifecycle.DELETE: + call[0][1]("96a5ef74-5832-a84b-f1f7-ca799957065d") + await hass.async_block_till_done() + + assert hass.states.get("climate.ac_office_granit") is None + + +@pytest.mark.parametrize( + ( + "device_fixture", + "domain", + "old_unique_id", + "suggested_object_id", + "new_unique_id", + ), + [ + ( + "multipurpose_sensor", + BINARY_SENSOR_DOMAIN, + "7d246592-93db-4d72-a10d-5a51793ece8c.contact", + "deck_door", + "7d246592-93db-4d72-a10d-5a51793ece8c_main_contactSensor_contact_contact", + ), + ( + "multipurpose_sensor", + SENSOR_DOMAIN, + "7d246592-93db-4d72-a10d-5a51793ece8c Y Coordinate", + "deck_door_y_coordinate", + "7d246592-93db-4d72-a10d-5a51793ece8c_main_threeAxis_threeAxis_y_coordinate", + ), + ( + "da_ac_rac_000001", + SENSOR_DOMAIN, + "7d246592-93db-4d72-a10d-ca799957065d.energy_meter", + "ac_office_granit_energy", + "7d246592-93db-4d72-a10d-ca799957065d_main_powerConsumptionReport_powerConsumption_energy_meter", + ), + ( + "da_ac_rac_000001", + CLIMATE_DOMAIN, + "7d246592-93db-4d72-a10d-ca799957065d", + "ac_office_granit", + "7d246592-93db-4d72-a10d-ca799957065d_main", + ), + ( + "c2c_shade", + COVER_DOMAIN, + "571af102-15db-4030-b76b-245a691f74a5", + "curtain_1a", + "571af102-15db-4030-b76b-245a691f74a5_main", + ), + ( + "generic_fan_3_speed", + FAN_DOMAIN, + "6d95a8b7-4ee3-429a-a13a-00ec9354170c", + "bedroom_fan", + "6d95a8b7-4ee3-429a-a13a-00ec9354170c_main", + ), + ( + "hue_rgbw_color_bulb", + LIGHT_DOMAIN, + "cb958955-b015-498c-9e62-fc0c51abd054", + "standing_light", + "cb958955-b015-498c-9e62-fc0c51abd054_main", + ), + ( + "yale_push_button_deadbolt_lock", + LOCK_DOMAIN, + "a9f587c5-5d8b-4273-8907-e7f609af5158", + "basement_door_lock", + "a9f587c5-5d8b-4273-8907-e7f609af5158_main", + ), + ( + "smart_plug", + SWITCH_DOMAIN, + "550a1c72-65a0-4d55-b97b-75168e055398", + "arlo_beta_basestation", + "550a1c72-65a0-4d55-b97b-75168e055398_main_switch_switch_switch", + ), + ], +) +async def test_entity_unique_id_migration( + hass: HomeAssistant, + devices: AsyncMock, + expires_at: int, + entity_registry: er.EntityRegistry, + domain: str, + old_unique_id: str, + suggested_object_id: str, + new_unique_id: str, +) -> None: + """Test entity unique ID migration.""" + mock_config_entry = MockConfigEntry( + domain=DOMAIN, + title="My home", + unique_id="397678e5-9995-4a39-9d9f-ae6ba310236c", + data={ + "auth_implementation": DOMAIN, + "token": { + "access_token": "mock-access-token", + "refresh_token": "mock-refresh-token", + "expires_at": expires_at, + "scope": " ".join(SCOPES), + "access_tier": 0, + "installed_app_id": "5aaaa925-2be1-4e40-b257-e4ef59083324", + }, + CONF_LOCATION_ID: "397678e5-9995-4a39-9d9f-ae6ba310236c", + CONF_INSTALLED_APP_ID: "123", + }, + version=3, + minor_version=1, + ) + mock_config_entry.add_to_hass(hass) + entry = entity_registry.async_get_or_create( + domain, + DOMAIN, + old_unique_id, + config_entry=mock_config_entry, + suggested_object_id=suggested_object_id, + ) + + await 
hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entry.entity_id) + + assert entry.unique_id == new_unique_id + + +@pytest.mark.parametrize( + ( + "device_fixture", + "domain", + "other_unique_id", + "old_unique_id", + "suggested_object_id", + "new_unique_id", + ), + [ + ( + "da_ks_microwave_0101x", + SENSOR_DOMAIN, + "2bad3237-4886-e699-1b90-4a51a3d55c8a.ovenJobState", + "2bad3237-4886-e699-1b90-4a51a3d55c8a.machineState", + "microwave_machine_state", + "2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenOperatingState_machineState_machineState", + ), + ( + "da_ks_microwave_0101x", + SENSOR_DOMAIN, + "2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenOperatingState_ovenJobState_ovenJobState", + "2bad3237-4886-e699-1b90-4a51a3d55c8a.machineState", + "microwave_machine_state", + "2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenOperatingState_machineState_machineState", + ), + ( + "da_wm_dw_000001", + SENSOR_DOMAIN, + "f36dc7ce-cac0-0667-dc14-a3704eb5e676.dishwasherJobState", + "f36dc7ce-cac0-0667-dc14-a3704eb5e676.machineState", + "microwave_machine_state", + "f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_dishwasherOperatingState_machineState_machineState", + ), + ( + "da_wm_dw_000001", + SENSOR_DOMAIN, + "f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_dishwasherOperatingState_dishwasherJobState_dishwasherJobState", + "f36dc7ce-cac0-0667-dc14-a3704eb5e676.machineState", + "microwave_machine_state", + "f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_dishwasherOperatingState_machineState_machineState", + ), + ( + "da_wm_wd_000001", + SENSOR_DOMAIN, + "02f7256e-8353-5bdd-547f-bd5b1647e01b.dryerJobState", + "02f7256e-8353-5bdd-547f-bd5b1647e01b.machineState", + "dryer_machine_state", + "02f7256e-8353-5bdd-547f-bd5b1647e01b_main_dryerOperatingState_machineState_machineState", + ), + ( + "da_wm_wd_000001", + SENSOR_DOMAIN, + "02f7256e-8353-5bdd-547f-bd5b1647e01b_main_dryerOperatingState_dryerJobState_dryerJobState", + "02f7256e-8353-5bdd-547f-bd5b1647e01b.machineState", + "dryer_machine_state", + "02f7256e-8353-5bdd-547f-bd5b1647e01b_main_dryerOperatingState_machineState_machineState", + ), + ( + "da_wm_wm_000001", + SENSOR_DOMAIN, + "f984b91d-f250-9d42-3436-33f09a422a47.washerJobState", + "f984b91d-f250-9d42-3436-33f09a422a47.machineState", + "washer_machine_state", + "f984b91d-f250-9d42-3436-33f09a422a47_main_washerOperatingState_machineState_machineState", + ), + ( + "da_wm_wm_000001", + SENSOR_DOMAIN, + "f984b91d-f250-9d42-3436-33f09a422a47_main_washerOperatingState_washerJobState_washerJobState", + "f984b91d-f250-9d42-3436-33f09a422a47.machineState", + "washer_machine_state", + "f984b91d-f250-9d42-3436-33f09a422a47_main_washerOperatingState_machineState_machineState", + ), + ], +) +async def test_entity_unique_id_migration_machine_state( + hass: HomeAssistant, + devices: AsyncMock, + expires_at: int, + entity_registry: er.EntityRegistry, + domain: str, + other_unique_id: str, + old_unique_id: str, + suggested_object_id: str, + new_unique_id: str, +) -> None: + """Test entity unique ID migration.""" + mock_config_entry = MockConfigEntry( + domain=DOMAIN, + title="My home", + unique_id="397678e5-9995-4a39-9d9f-ae6ba310236c", + data={ + "auth_implementation": DOMAIN, + "token": { + "access_token": "mock-access-token", + "refresh_token": "mock-refresh-token", + "expires_at": expires_at, + "scope": " ".join(SCOPES), + "access_tier": 0, + "installed_app_id": "5aaaa925-2be1-4e40-b257-e4ef59083324", + }, + CONF_LOCATION_ID: 
"397678e5-9995-4a39-9d9f-ae6ba310236c", + CONF_INSTALLED_APP_ID: "123", + }, + version=3, + minor_version=1, + ) + mock_config_entry.add_to_hass(hass) + entity_registry.async_get_or_create( + domain, + DOMAIN, + other_unique_id, + config_entry=mock_config_entry, + suggested_object_id="job_state", + ) + entry = entity_registry.async_get_or_create( + domain, + DOMAIN, + old_unique_id, + config_entry=mock_config_entry, + suggested_object_id=suggested_object_id, + ) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entry.entity_id) + + assert entry.unique_id == new_unique_id diff --git a/tests/components/smartthings/test_media_player.py b/tests/components/smartthings/test_media_player.py new file mode 100644 index 00000000000..b7cecfe8408 --- /dev/null +++ b/tests/components/smartthings/test_media_player.py @@ -0,0 +1,432 @@ +"""Test for the SmartThings media player platform.""" + +from unittest.mock import AsyncMock + +from pysmartthings import Attribute, Capability, Command, Status +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.media_player import ( + ATTR_INPUT_SOURCE, + ATTR_MEDIA_REPEAT, + ATTR_MEDIA_SHUFFLE, + ATTR_MEDIA_VOLUME_LEVEL, + ATTR_MEDIA_VOLUME_MUTED, + DOMAIN as MEDIA_PLAYER_DOMAIN, + SERVICE_SELECT_SOURCE, + RepeatMode, +) +from homeassistant.components.smartthings.const import MAIN +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_MEDIA_NEXT_TRACK, + SERVICE_MEDIA_PAUSE, + SERVICE_MEDIA_PLAY, + SERVICE_MEDIA_PREVIOUS_TRACK, + SERVICE_MEDIA_STOP, + SERVICE_REPEAT_SET, + SERVICE_SHUFFLE_SET, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + SERVICE_VOLUME_DOWN, + SERVICE_VOLUME_MUTE, + SERVICE_VOLUME_SET, + SERVICE_VOLUME_UP, + STATE_OFF, + STATE_PLAYING, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration, snapshot_smartthings_entities, trigger_update + +from tests.common import MockConfigEntry + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + await setup_integration(hass, mock_config_entry) + + snapshot_smartthings_entities( + hass, entity_registry, snapshot, Platform.MEDIA_PLAYER + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +@pytest.mark.parametrize( + ("action", "command"), + [ + (SERVICE_TURN_ON, Command.ON), + (SERVICE_TURN_OFF, Command.OFF), + ], +) +async def test_turn_on_off( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + action: str, + command: Command, +) -> None: + """Test media player turn on and off command.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + action, + {ATTR_ENTITY_ID: "media_player.soundbar"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", Capability.SWITCH, command, MAIN + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +@pytest.mark.parametrize( + ("muted", "argument"), + [ + (True, "muted"), + (False, "unmuted"), + ], +) +async def test_mute_unmute( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + muted: bool, + argument: str, +) -> None: + """Test media player mute and unmute command.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_MUTE, + {ATTR_ENTITY_ID: "media_player.soundbar", ATTR_MEDIA_VOLUME_MUTED: muted}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.AUDIO_MUTE, + Command.SET_MUTE, + MAIN, + argument=argument, + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +async def test_set_volume_level( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test media player set volume level command.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_SET, + {ATTR_ENTITY_ID: "media_player.soundbar", ATTR_MEDIA_VOLUME_LEVEL: 0.31}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.AUDIO_VOLUME, + Command.SET_VOLUME, + MAIN, + argument=31, + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +async def test_volume_up( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test media player increase volume level command.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_UP, + {ATTR_ENTITY_ID: "media_player.soundbar"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.AUDIO_VOLUME, + Command.VOLUME_UP, + MAIN, + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +async def test_volume_down( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test media player decrease volume level command.""" + await 
setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_DOWN, + {ATTR_ENTITY_ID: "media_player.soundbar"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.AUDIO_VOLUME, + Command.VOLUME_DOWN, + MAIN, + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +async def test_media_play( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test media player play command.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_PLAY, + {ATTR_ENTITY_ID: "media_player.soundbar"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.MEDIA_PLAYBACK, + Command.PLAY, + MAIN, + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +async def test_media_pause( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test media player pause command.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_PAUSE, + {ATTR_ENTITY_ID: "media_player.soundbar"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.MEDIA_PLAYBACK, + Command.PAUSE, + MAIN, + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +async def test_media_stop( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test media player stop command.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_STOP, + {ATTR_ENTITY_ID: "media_player.soundbar"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.MEDIA_PLAYBACK, + Command.STOP, + MAIN, + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +async def test_media_previous_track( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test media player previous track command.""" + devices.get_device_status.return_value[MAIN][Capability.MEDIA_PLAYBACK] = { + Attribute.SUPPORTED_PLAYBACK_COMMANDS: Status(["rewind"]) + } + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_PREVIOUS_TRACK, + {ATTR_ENTITY_ID: "media_player.soundbar"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.MEDIA_PLAYBACK, + Command.REWIND, + MAIN, + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +async def test_media_next_track( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test media player next track command.""" + devices.get_device_status.return_value[MAIN][Capability.MEDIA_PLAYBACK] = { + Attribute.SUPPORTED_PLAYBACK_COMMANDS: Status(["fastForward"]) + } + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_NEXT_TRACK, + {ATTR_ENTITY_ID: "media_player.soundbar"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + 
"afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.MEDIA_PLAYBACK, + Command.FAST_FORWARD, + MAIN, + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +async def test_select_source( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test media player stop command.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_SELECT_SOURCE, + {ATTR_ENTITY_ID: "media_player.soundbar", ATTR_INPUT_SOURCE: "digital"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.MEDIA_INPUT_SOURCE, + Command.SET_INPUT_SOURCE, + MAIN, + "digital", + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +@pytest.mark.parametrize( + ("shuffle", "argument"), + [ + (True, "enabled"), + (False, "disabled"), + ], +) +async def test_media_shuffle_on_off( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + shuffle: bool, + argument: bool, +) -> None: + """Test media player media shuffle command.""" + devices.get_device_status.return_value[MAIN][Capability.MEDIA_PLAYBACK_SHUFFLE] = { + Attribute.PLAYBACK_SHUFFLE: Status(True) + } + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_SHUFFLE_SET, + {ATTR_ENTITY_ID: "media_player.soundbar", ATTR_MEDIA_SHUFFLE: shuffle}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.MEDIA_PLAYBACK_SHUFFLE, + Command.SET_PLAYBACK_SHUFFLE, + MAIN, + argument=argument, + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +@pytest.mark.parametrize( + ("repeat", "argument"), + [ + (RepeatMode.OFF, "off"), + (RepeatMode.ONE, "one"), + (RepeatMode.ALL, "all"), + ], +) +async def test_media_repeat_mode( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + repeat: RepeatMode, + argument: bool, +) -> None: + """Test media player repeat mode command.""" + devices.get_device_status.return_value[MAIN][Capability.MEDIA_PLAYBACK_REPEAT] = { + Attribute.REPEAT_MODE: Status("one") + } + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_REPEAT_SET, + {ATTR_ENTITY_ID: "media_player.soundbar", ATTR_MEDIA_REPEAT: repeat}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.MEDIA_PLAYBACK_REPEAT, + Command.SET_PLAYBACK_REPEAT_MODE, + MAIN, + argument=argument, + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +async def test_state_update( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test state update.""" + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("media_player.soundbar").state == STATE_PLAYING + + await trigger_update( + hass, + devices, + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.SWITCH, + Attribute.SWITCH, + "off", + ) + + assert hass.states.get("media_player.soundbar").state == STATE_OFF diff --git a/tests/components/smartthings/test_number.py b/tests/components/smartthings/test_number.py new file mode 100644 index 00000000000..578b94e050f --- /dev/null +++ b/tests/components/smartthings/test_number.py @@ -0,0 +1,81 @@ +"""Test for the SmartThings number platform.""" 
+ +from unittest.mock import AsyncMock + +from pysmartthings import Attribute, Capability, Command +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.number import ( + ATTR_VALUE, + DOMAIN as NUMBER_DOMAIN, + SERVICE_SET_VALUE, +) +from homeassistant.components.smartthings import MAIN +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration, snapshot_smartthings_entities, trigger_update + +from tests.common import MockConfigEntry + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + await setup_integration(hass, mock_config_entry) + + snapshot_smartthings_entities(hass, entity_registry, snapshot, Platform.NUMBER) + + +@pytest.mark.parametrize("device_fixture", ["da_wm_wm_000001"]) +async def test_set_value( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test setting a value.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: "number.washer_rinse_cycles", ATTR_VALUE: 3}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "f984b91d-f250-9d42-3436-33f09a422a47", + Capability.CUSTOM_WASHER_RINSE_CYCLES, + Command.SET_WASHER_RINSE_CYCLES, + MAIN, + argument="3", + ) + + +@pytest.mark.parametrize("device_fixture", ["da_wm_wm_000001"]) +async def test_state_update( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test state update.""" + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("number.washer_rinse_cycles").state == "2" + + await trigger_update( + hass, + devices, + "f984b91d-f250-9d42-3436-33f09a422a47", + Capability.CUSTOM_WASHER_RINSE_CYCLES, + Attribute.WASHER_RINSE_CYCLES, + "3", + ) + + assert hass.states.get("number.washer_rinse_cycles").state == "3" diff --git a/tests/components/smartthings/test_select.py b/tests/components/smartthings/test_select.py new file mode 100644 index 00000000000..2c5c55239f2 --- /dev/null +++ b/tests/components/smartthings/test_select.py @@ -0,0 +1,121 @@ +"""Test for the SmartThings select platform.""" + +from unittest.mock import AsyncMock + +from pysmartthings import Attribute, Capability, Command +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.select import ( + ATTR_OPTION, + DOMAIN as SELECT_DOMAIN, + SERVICE_SELECT_OPTION, +) +from homeassistant.components.smartthings import MAIN +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import entity_registry as er + +from . 
import ( + set_attribute_value, + setup_integration, + snapshot_smartthings_entities, + trigger_update, +) + +from tests.common import MockConfigEntry + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + await setup_integration(hass, mock_config_entry) + + snapshot_smartthings_entities(hass, entity_registry, snapshot, Platform.SELECT) + + +@pytest.mark.parametrize("device_fixture", ["da_wm_wd_000001"]) +async def test_state_update( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test state update.""" + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("select.dryer").state == "stop" + + await trigger_update( + hass, + devices, + "02f7256e-8353-5bdd-547f-bd5b1647e01b", + Capability.DRYER_OPERATING_STATE, + Attribute.MACHINE_STATE, + "run", + ) + + assert hass.states.get("select.dryer").state == "run" + + +@pytest.mark.parametrize("device_fixture", ["da_wm_wd_000001"]) +async def test_select_option( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test selecting an option.""" + set_attribute_value( + devices, + Capability.REMOTE_CONTROL_STATUS, + Attribute.REMOTE_CONTROL_ENABLED, + "true", + ) + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: "select.dryer", ATTR_OPTION: "run"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "02f7256e-8353-5bdd-547f-bd5b1647e01b", + Capability.DRYER_OPERATING_STATE, + Command.SET_MACHINE_STATE, + MAIN, + argument="run", + ) + + +@pytest.mark.parametrize("device_fixture", ["da_wm_wd_000001"]) +async def test_select_option_without_remote_control( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test selecting an option when remote control is disabled.""" + set_attribute_value( + devices, + Capability.REMOTE_CONTROL_STATUS, + Attribute.REMOTE_CONTROL_ENABLED, + "false", + ) + await setup_integration(hass, mock_config_entry) + + with pytest.raises( + ServiceValidationError, + match="Can only be updated when remote control is enabled", + ): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: "select.dryer", ATTR_OPTION: "run"}, + blocking=True, + ) + devices.execute_device_command.assert_not_called() diff --git a/tests/components/smartthings/test_sensor.py b/tests/components/smartthings/test_sensor.py index c83950de9e9..cf49d02b910 100644 --- a/tests/components/smartthings/test_sensor.py +++ b/tests/components/smartthings/test_sensor.py @@ -6,9 +6,14 @@ from pysmartthings import Attribute, Capability import pytest from syrupy import SnapshotAssertion +from homeassistant.components import automation, script +from homeassistant.components.automation import automations_with_entity +from homeassistant.components.script import scripts_with_entity +from homeassistant.components.smartthings.const import DOMAIN from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import entity_registry as er, issue_registry as ir +from homeassistant.setup import async_setup_component from .
import setup_integration, snapshot_smartthings_entities, trigger_update @@ -49,3 +54,92 @@ async def test_state_update( ) assert hass.states.get("sensor.ac_office_granit_temperature").state == "20" + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize( + ("device_fixture", "entity_id", "translation_key"), + [ + ("da_wm_wm_000001", "sensor.washer_machine_state", "machine_state"), + ("da_wm_wd_000001", "sensor.dryer_machine_state", "machine_state"), + ("hw_q80r_soundbar", "sensor.soundbar_volume", "media_player"), + ("hw_q80r_soundbar", "sensor.soundbar_media_playback_status", "media_player"), + ("hw_q80r_soundbar", "sensor.soundbar_media_input_source", "media_player"), + ( + "im_speaker_ai_0001", + "sensor.galaxy_home_mini_media_playback_shuffle", + "media_player", + ), + ( + "im_speaker_ai_0001", + "sensor.galaxy_home_mini_media_playback_repeat", + "media_player", + ), + ], +) +async def test_create_issue( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + issue_registry: ir.IssueRegistry, + entity_id: str, + translation_key: str, +) -> None: + """Test we create an issue when an automation or script is using a deprecated entity.""" + issue_id = f"deprecated_{translation_key}_{entity_id}" + + assert await async_setup_component( + hass, + automation.DOMAIN, + { + automation.DOMAIN: { + "id": "test", + "alias": "test", + "trigger": {"platform": "state", "entity_id": entity_id}, + "action": { + "action": "automation.turn_on", + "target": { + "entity_id": "automation.test", + }, + }, + } + }, + ) + assert await async_setup_component( + hass, + script.DOMAIN, + { + script.DOMAIN: { + "test": { + "sequence": [ + { + "condition": "state", + "entity_id": entity_id, + "state": "on", + }, + ], + } + } + }, + ) + + await setup_integration(hass, mock_config_entry) + + assert automations_with_entity(hass, entity_id)[0] == "automation.test" + assert scripts_with_entity(hass, entity_id)[0] == "script.test" + + assert len(issue_registry.issues) == 1 + issue = issue_registry.async_get_issue(DOMAIN, issue_id) + assert issue is not None + assert issue.translation_key == f"deprecated_{translation_key}" + assert issue.translation_placeholders == { + "entity": entity_id, + "items": "- [test](/config/automation/edit/test)\n- [test](/config/script/edit/test)", + } + + await hass.config_entries.async_unload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + # Assert the issue is no longer present + assert not issue_registry.async_get_issue(DOMAIN, issue_id) + assert len(issue_registry.issues) == 0 diff --git a/tests/components/smartthings/test_switch.py b/tests/components/smartthings/test_switch.py index a1e420a8edb..2e360ff68e3 100644 --- a/tests/components/smartthings/test_switch.py +++ b/tests/components/smartthings/test_switch.py @@ -6,7 +6,10 @@ from pysmartthings import Attribute, Capability, Command import pytest from syrupy import SnapshotAssertion -from homeassistant.components.smartthings.const import MAIN +from homeassistant.components import automation, script +from homeassistant.components.automation import automations_with_entity +from homeassistant.components.script import scripts_with_entity +from homeassistant.components.smartthings.const import DOMAIN, MAIN from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, @@ -17,7 +20,8 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers 
import entity_registry as er +from homeassistant.helpers import entity_registry as er, issue_registry as ir +from homeassistant.setup import async_setup_component from . import setup_integration, snapshot_smartthings_entities, trigger_update @@ -66,6 +70,39 @@ async def test_switch_turn_on_off( ) +@pytest.mark.parametrize("device_fixture", ["da_wm_wd_000001"]) +@pytest.mark.parametrize( + ("action", "argument"), + [ + (SERVICE_TURN_ON, "on"), + (SERVICE_TURN_OFF, "off"), + ], +) +async def test_command_switch_turn_on_off( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + action: str, + argument: str, +) -> None: + """Test switch turn on and off command.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + SWITCH_DOMAIN, + action, + {ATTR_ENTITY_ID: "switch.dryer_wrinkle_prevent"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "02f7256e-8353-5bdd-547f-bd5b1647e01b", + Capability.CUSTOM_DRYER_WRINKLE_PREVENT, + Command.SET_DRYER_WRINKLE_PREVENT, + MAIN, + argument, + ) + + @pytest.mark.parametrize("device_fixture", ["c2c_arlo_pro_3_switch"]) async def test_state_update( hass: HomeAssistant, @@ -87,3 +124,80 @@ async def test_state_update( ) assert hass.states.get("switch.2nd_floor_hallway").state == STATE_OFF + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize( + ("device_fixture", "entity_id", "translation_key"), + [ + ("da_wm_wm_000001", "switch.washer", "deprecated_switch_appliance"), + ("da_wm_wd_000001", "switch.dryer", "deprecated_switch_appliance"), + ("hw_q80r_soundbar", "switch.soundbar", "deprecated_switch_media_player"), + ], +) +async def test_create_issue( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + issue_registry: ir.IssueRegistry, + entity_id: str, + translation_key: str, +) -> None: + """Test we create an issue when an automation or script is using a deprecated entity.""" + issue_id = f"deprecated_switch_{entity_id}" + + assert await async_setup_component( + hass, + automation.DOMAIN, + { + automation.DOMAIN: { + "id": "test", + "alias": "test", + "trigger": {"platform": "state", "entity_id": entity_id}, + "action": { + "action": "automation.turn_on", + "target": { + "entity_id": "automation.test", + }, + }, + } + }, + ) + assert await async_setup_component( + hass, + script.DOMAIN, + { + script.DOMAIN: { + "test": { + "sequence": [ + { + "condition": "state", + "entity_id": entity_id, + "state": "on", + }, + ], + } + } + }, + ) + + await setup_integration(hass, mock_config_entry) + + assert automations_with_entity(hass, entity_id)[0] == "automation.test" + assert scripts_with_entity(hass, entity_id)[0] == "script.test" + + assert len(issue_registry.issues) == 1 + issue = issue_registry.async_get_issue(DOMAIN, issue_id) + assert issue is not None + assert issue.translation_key == translation_key + assert issue.translation_placeholders == { + "entity": entity_id, + "items": "- [test](/config/automation/edit/test)\n- [test](/config/script/edit/test)", + } + + await hass.config_entries.async_unload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + # Assert the issue is no longer present + assert not issue_registry.async_get_issue(DOMAIN, issue_id) + assert len(issue_registry.issues) == 0 diff --git a/tests/components/smartthings/test_update.py b/tests/components/smartthings/test_update.py new file mode 100644 index 00000000000..8c3d9e1a968 --- /dev/null +++ 
b/tests/components/smartthings/test_update.py @@ -0,0 +1,142 @@ +"""Test for the SmartThings update platform.""" + +from unittest.mock import AsyncMock + +from pysmartthings import Attribute, Capability, Command +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.smartthings.const import MAIN +from homeassistant.components.update import ( + ATTR_IN_PROGRESS, + DOMAIN as UPDATE_DOMAIN, + SERVICE_INSTALL, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration, snapshot_smartthings_entities, trigger_update + +from tests.common import MockConfigEntry + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + await setup_integration(hass, mock_config_entry) + + snapshot_smartthings_entities(hass, entity_registry, snapshot, Platform.UPDATE) + + +@pytest.mark.parametrize("device_fixture", ["contact_sensor"]) +async def test_installing_update( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test installing an update.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + UPDATE_DOMAIN, + SERVICE_INSTALL, + {ATTR_ENTITY_ID: "update.front_door_open_closed_sensor_firmware"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "2d9a892b-1c93-45a5-84cb-0e81889498c6", + Capability.FIRMWARE_UPDATE, + Command.UPDATE_FIRMWARE, + MAIN, + ) + + +@pytest.mark.parametrize("device_fixture", ["contact_sensor"]) +async def test_state_update( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test state update.""" + await setup_integration(hass, mock_config_entry) + + assert ( + hass.states.get("update.front_door_open_closed_sensor_firmware").state + == STATE_ON + ) + + await trigger_update( + hass, + devices, + "2d9a892b-1c93-45a5-84cb-0e81889498c6", + Capability.FIRMWARE_UPDATE, + Attribute.CURRENT_VERSION, + "00000104", + ) + + assert ( + hass.states.get("update.front_door_open_closed_sensor_firmware").state + == STATE_OFF + ) + + +@pytest.mark.parametrize("device_fixture", ["contact_sensor"]) +async def test_state_progress_update( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test state progress update.""" + await setup_integration(hass, mock_config_entry) + + assert ( + hass.states.get("update.front_door_open_closed_sensor_firmware").attributes[ + ATTR_IN_PROGRESS + ] + is False + ) + + await trigger_update( + hass, + devices, + "2d9a892b-1c93-45a5-84cb-0e81889498c6", + Capability.FIRMWARE_UPDATE, + Attribute.STATE, + "updateInProgress", + ) + + assert ( + hass.states.get("update.front_door_open_closed_sensor_firmware").attributes[ + ATTR_IN_PROGRESS + ] + is True + ) + + +@pytest.mark.parametrize("device_fixture", ["centralite"]) +async def test_state_update_available( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test state update available.""" + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("update.dimmer_debian_firmware").state == STATE_OFF + + await trigger_update( + hass, + devices, + "d0268a69-abfb-4c92-a646-61cec2e510ad", + 
Capability.FIRMWARE_UPDATE, + Attribute.AVAILABLE_VERSION, + "16015011", + ) + + assert hass.states.get("update.dimmer_debian_firmware").state == STATE_ON diff --git a/tests/components/smartthings/test_valve.py b/tests/components/smartthings/test_valve.py new file mode 100644 index 00000000000..f0ba34c8264 --- /dev/null +++ b/tests/components/smartthings/test_valve.py @@ -0,0 +1,87 @@ +"""Test for the SmartThings valve platform.""" + +from unittest.mock import AsyncMock + +from pysmartthings import Attribute, Capability, Command +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.smartthings import MAIN +from homeassistant.components.valve import DOMAIN as VALVE_DOMAIN, ValveState +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_CLOSE_VALVE, + SERVICE_OPEN_VALVE, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration, snapshot_smartthings_entities, trigger_update + +from tests.common import MockConfigEntry + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + await setup_integration(hass, mock_config_entry) + + snapshot_smartthings_entities(hass, entity_registry, snapshot, Platform.VALVE) + + +@pytest.mark.parametrize("device_fixture", ["virtual_valve"]) +@pytest.mark.parametrize( + ("action", "command"), + [ + (SERVICE_OPEN_VALVE, Command.OPEN), + (SERVICE_CLOSE_VALVE, Command.CLOSE), + ], +) +async def test_valve_open_close( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + action: str, + command: Command, +) -> None: + """Test valve open and close command.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + VALVE_DOMAIN, + action, + {ATTR_ENTITY_ID: "valve.volvo"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "612ab3c2-3bb0-48f7-b2c0-15b169cb2fc3", Capability.VALVE, command, MAIN + ) + + +@pytest.mark.parametrize("device_fixture", ["virtual_valve"]) +async def test_state_update( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test state update.""" + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("valve.volvo").state == ValveState.CLOSED + + await trigger_update( + hass, + devices, + "612ab3c2-3bb0-48f7-b2c0-15b169cb2fc3", + Capability.VALVE, + Attribute.VALVE, + "open", + ) + + assert hass.states.get("valve.volvo").state == ValveState.OPEN diff --git a/tests/components/smlight/test_config_flow.py b/tests/components/smlight/test_config_flow.py index c8933029ce6..4ecfe9366e3 100644 --- a/tests/components/smlight/test_config_flow.py +++ b/tests/components/smlight/test_config_flow.py @@ -193,7 +193,7 @@ async def test_zeroconf_flow_auth( } assert len(mock_setup_entry.mock_calls) == 1 - assert len(mock_smlight_client.get_info.mock_calls) == 3 + assert len(mock_smlight_client.get_info.mock_calls) == 2 async def test_zeroconf_unsupported_abort( @@ -406,7 +406,7 @@ async def test_user_invalid_auth( } assert len(mock_setup_entry.mock_calls) == 1 - assert len(mock_smlight_client.get_info.mock_calls) == 3 + assert len(mock_smlight_client.get_info.mock_calls) == 2 async def test_user_cannot_connect( @@ -443,7 +443,7 @@ async def test_user_cannot_connect( assert result2["title"] == "SLZB-06p7" 
assert len(mock_setup_entry.mock_calls) == 1 - assert len(mock_smlight_client.get_info.mock_calls) == 3 + assert len(mock_smlight_client.get_info.mock_calls) == 2 async def test_auth_cannot_connect( diff --git a/tests/components/smlight/test_sensor.py b/tests/components/smlight/test_sensor.py index f130d7ccf30..bec73bc514a 100644 --- a/tests/components/smlight/test_sensor.py +++ b/tests/components/smlight/test_sensor.py @@ -2,17 +2,18 @@ from unittest.mock import MagicMock -from pysmlight import Sensors +from pysmlight import Info, Sensors import pytest from syrupy.assertion import SnapshotAssertion +from homeassistant.components.smlight.const import DOMAIN from homeassistant.const import STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from .conftest import setup_integration -from tests.common import MockConfigEntry, snapshot_platform +from tests.common import MockConfigEntry, load_json_object_fixture, snapshot_platform pytestmark = [ pytest.mark.usefixtures( @@ -73,3 +74,38 @@ async def test_zigbee_uptime_disconnected( state = hass.states.get("sensor.mock_title_zigbee_uptime") assert state.state == STATE_UNKNOWN + + +async def test_zigbee2_temp_sensor( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_smlight_client: MagicMock, +) -> None: + """Test for zb_temp2 if device has second radio.""" + mock_smlight_client.get_sensors.return_value = Sensors(zb_temp2=20.45) + await setup_integration(hass, mock_config_entry) + + state = hass.states.get("sensor.mock_title_zigbee_chip_temp_2") + assert state + assert state.state == "20.45" + + +async def test_zigbee_type_sensors( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_smlight_client: MagicMock, +) -> None: + """Test for zigbee type sensor with second radio.""" + mock_smlight_client.get_info.side_effect = None + mock_smlight_client.get_info.return_value = Info.from_dict( + load_json_object_fixture("info-MR1.json", DOMAIN) + ) + await setup_integration(hass, mock_config_entry) + + state = hass.states.get("sensor.mock_title_zigbee_type") + assert state + assert state.state == "coordinator" + + state = hass.states.get("sensor.mock_title_zigbee_type_2") + assert state + assert state.state == "router" diff --git a/tests/components/smlight/test_update.py b/tests/components/smlight/test_update.py index 86d19968910..d120a08d519 100644 --- a/tests/components/smlight/test_update.py +++ b/tests/components/smlight/test_update.py @@ -154,10 +154,9 @@ async def test_update_zigbee2_firmware( mock_smlight_client: MagicMock, ) -> None: """Test update of zigbee2 firmware where available.""" + mock_info = Info.from_dict(load_json_object_fixture("info-MR1.json", DOMAIN)) mock_smlight_client.get_info.side_effect = None - mock_smlight_client.get_info.return_value = Info.from_dict( - load_json_object_fixture("info-MR1.json", DOMAIN) - ) + mock_smlight_client.get_info.return_value = mock_info await setup_integration(hass, mock_config_entry) entity_id = "update.mock_title_zigbee_firmware_2" state = hass.states.get(entity_id) @@ -177,17 +176,17 @@ async def test_update_zigbee2_firmware( event_function = get_mock_event_function(mock_smlight_client, SmEvents.FW_UPD_done) event_function(MOCK_FIRMWARE_DONE) - with patch( - "homeassistant.components.smlight.update.get_radio", return_value=MOCK_RADIO - ): - freezer.tick(timedelta(seconds=5)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - state = 
hass.states.get(entity_id) - assert state.state == STATE_OFF - assert state.attributes[ATTR_INSTALLED_VERSION] == "20240716" - assert state.attributes[ATTR_LATEST_VERSION] == "20240716" + mock_info.radios[1] = MOCK_RADIO + + freezer.tick(timedelta(seconds=5)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state.state == STATE_OFF + assert state.attributes[ATTR_INSTALLED_VERSION] == "20240716" + assert state.attributes[ATTR_LATEST_VERSION] == "20240716" async def test_update_legacy_firmware_v2( diff --git a/tests/components/snoo/__init__.py b/tests/components/snoo/__init__.py index f8529251720..b4692e6f08b 100644 --- a/tests/components/snoo/__init__.py +++ b/tests/components/snoo/__init__.py @@ -1,5 +1,11 @@ """Tests for the Happiest Baby Snoo integration.""" +from collections.abc import Awaitable, Callable +from unittest.mock import AsyncMock + +import pytest +from python_snoo.containers import SnooData + from homeassistant.components.snoo.const import DOMAIN from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME @@ -36,3 +42,13 @@ async def async_init_integration(hass: HomeAssistant) -> ConfigEntry: await hass.async_block_till_done() return entry + + +def find_update_callback( + mock: AsyncMock, serial_number: str +) -> Callable[[SnooData], Awaitable[None]]: + """Find the update callback for a specific identifier.""" + for call in mock.subscribe.call_args_list: + if call[0][0].serialNumber == serial_number: + return call[0][1] + pytest.fail(f"Callback for identifier {serial_number} not found") diff --git a/tests/components/snoo/conftest.py b/tests/components/snoo/conftest.py index 33642e67ff5..6163fa56b7f 100644 --- a/tests/components/snoo/conftest.py +++ b/tests/components/snoo/conftest.py @@ -5,9 +5,8 @@ from unittest.mock import AsyncMock, patch import pytest from python_snoo.containers import SnooDevice -from python_snoo.snoo import Snoo -from .const import MOCK_AMAZON_AUTH, MOCK_SNOO_AUTH, MOCK_SNOO_DEVICES +from .const import MOCK_SNOO_DEVICES, MOCKED_AUTH @pytest.fixture @@ -19,55 +18,14 @@ def mock_setup_entry() -> Generator[AsyncMock]: yield mock_setup_entry -class MockedSnoo(Snoo): - """Mock the Snoo object.""" - - def __init__(self, email, password, clientsession) -> None: - """Set up a Mocked Snoo.""" - super().__init__(email, password, clientsession) - self.auth_error = None - - async def subscribe(self, device: SnooDevice, function): - """Mock the subscribe function.""" - return AsyncMock() - - async def send_command(self, command: str, device: SnooDevice, **kwargs): - """Mock the send command function.""" - return AsyncMock() - - async def authorize(self): - """Do normal auth flow unless error is patched.""" - if self.auth_error: - raise self.auth_error - return await super().authorize() - - def set_auth_error(self, error: Exception | None): - """Set an error for authentication.""" - self.auth_error = error - - async def auth_amazon(self): - """Mock the amazon auth.""" - return MOCK_AMAZON_AUTH - - async def auth_snoo(self, id_token): - """Mock the snoo auth.""" - return MOCK_SNOO_AUTH - - async def schedule_reauthorization(self, snoo_expiry: int): - """Mock scheduling reauth.""" - return AsyncMock() - - async def get_devices(self) -> list[SnooDevice]: - """Move getting devices.""" - return [SnooDevice.from_dict(dev) for dev in MOCK_SNOO_DEVICES] - - @pytest.fixture(name="bypass_api") -def bypass_api() -> MockedSnoo: +def bypass_api() -> 
Generator[AsyncMock]: """Bypass the Snoo api.""" - api = MockedSnoo("email", "password", AsyncMock()) with ( - patch("homeassistant.components.snoo.Snoo", return_value=api), - patch("homeassistant.components.snoo.config_flow.Snoo", return_value=api), + patch("homeassistant.components.snoo.Snoo", autospec=True) as mock_client, + patch("homeassistant.components.snoo.config_flow.Snoo", new=mock_client), ): - yield api + client = mock_client.return_value + client.get_devices.return_value = [SnooDevice.from_dict(MOCK_SNOO_DEVICES[0])] + client.authorize.return_value = MOCKED_AUTH + yield client diff --git a/tests/components/snoo/const.py b/tests/components/snoo/const.py index c5d53780fa1..2657048afb8 100644 --- a/tests/components/snoo/const.py +++ b/tests/components/snoo/const.py @@ -1,5 +1,9 @@ """Snoo constants for testing.""" +import time + +from python_snoo.containers import AuthorizationInfo, SnooData + MOCK_AMAZON_AUTH = { # This is a JWT with random values. "AccessToken": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJhMWIyYzNkNC1lNWY2" @@ -32,3 +36,36 @@ MOCK_SNOO_DEVICES = [ "provisionedAt": "random_time", } ] + +MOCK_SNOO_DATA = SnooData.from_dict( + { + "system_state": "normal", + "sw_version": "v1.14.27", + "state_machine": { + "session_id": "0", + "state": "ONLINE", + "is_active_session": "false", + "since_session_start_ms": -1, + "time_left": -1, + "hold": "off", + "weaning": "off", + "audio": "on", + "up_transition": "NONE", + "down_transition": "NONE", + "sticky_white_noise": "off", + }, + "left_safety_clip": 1, + "right_safety_clip": 1, + "event": "status_requested", + "event_time_ms": int(time.time()), + "rx_signal": {"rssi": -45, "strength": 100}, + } +) + + +MOCKED_AUTH = AuthorizationInfo( + snoo=MOCK_SNOO_AUTH, + aws_access=MOCK_AMAZON_AUTH["AccessToken"], + aws_id=MOCK_AMAZON_AUTH["IdToken"], + aws_refresh=MOCK_AMAZON_AUTH["RefreshToken"], +) diff --git a/tests/components/snoo/test_binary_sensor.py b/tests/components/snoo/test_binary_sensor.py new file mode 100644 index 00000000000..77b2e36c1fe --- /dev/null +++ b/tests/components/snoo/test_binary_sensor.py @@ -0,0 +1,30 @@ +"""Test Snoo Binary Sensors.""" + +from unittest.mock import AsyncMock + +from homeassistant.const import STATE_ON, STATE_UNAVAILABLE +from homeassistant.core import HomeAssistant + +from . 
import async_init_integration, find_update_callback +from .const import MOCK_SNOO_DATA + + +async def test_binary_sensors(hass: HomeAssistant, bypass_api: AsyncMock) -> None: + """Test binary sensors and check test values are correctly set.""" + await async_init_integration(hass) + assert len(hass.states.async_all("binary_sensor")) == 2 + assert ( + hass.states.get("binary_sensor.test_snoo_left_safety_clip").state + == STATE_UNAVAILABLE + ) + assert ( + hass.states.get("binary_sensor.test_snoo_right_safety_clip").state + == STATE_UNAVAILABLE + ) + find_update_callback(bypass_api, "random_num")(MOCK_SNOO_DATA) + await hass.async_block_till_done() + assert len(hass.states.async_all("binary_sensor")) == 2 + assert hass.states.get("binary_sensor.test_snoo_left_safety_clip").state == STATE_ON + assert ( + hass.states.get("binary_sensor.test_snoo_right_safety_clip").state == STATE_ON + ) diff --git a/tests/components/snoo/test_config_flow.py b/tests/components/snoo/test_config_flow.py index ffdfb22142d..9e07f011cd4 100644 --- a/tests/components/snoo/test_config_flow.py +++ b/tests/components/snoo/test_config_flow.py @@ -13,11 +13,10 @@ from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from . import create_entry -from .conftest import MockedSnoo async def test_config_flow_success( - hass: HomeAssistant, mock_setup_entry: AsyncMock, bypass_api: MockedSnoo + hass: HomeAssistant, mock_setup_entry: AsyncMock, bypass_api: AsyncMock ) -> None: """Test we create the entry successfully.""" result = await hass.config_entries.flow.async_init( @@ -55,7 +54,7 @@ async def test_config_flow_success( async def test_form_auth_issues( hass: HomeAssistant, mock_setup_entry: AsyncMock, - bypass_api: MockedSnoo, + bypass_api: AsyncMock, exception, error_msg, ) -> None: @@ -64,7 +63,7 @@ async def test_form_auth_issues( DOMAIN, context={"source": config_entries.SOURCE_USER} ) # Set Authorize to fail. - bypass_api.set_auth_error(exception) + bypass_api.authorize.side_effect = exception result = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -73,10 +72,9 @@ async def test_form_auth_issues( }, ) # Reset auth back to the original - bypass_api.set_auth_error(None) assert result["type"] == FlowResultType.FORM assert result["errors"] == {"base": error_msg} - + bypass_api.authorize.side_effect = None result = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -84,7 +82,6 @@ async def test_form_auth_issues( CONF_PASSWORD: "test-password", }, ) - await hass.async_block_till_done() assert result["type"] == FlowResultType.CREATE_ENTRY assert result["title"] == "test-username" @@ -96,7 +93,7 @@ async def test_form_auth_issues( async def test_account_already_configured( - hass: HomeAssistant, mock_setup_entry: AsyncMock, bypass_api + hass: HomeAssistant, mock_setup_entry: AsyncMock, bypass_api: AsyncMock ) -> None: """Ensure we abort if the config flow already exists.""" create_entry(hass) diff --git a/tests/components/snoo/test_event.py b/tests/components/snoo/test_event.py new file mode 100644 index 00000000000..41cb386a599 --- /dev/null +++ b/tests/components/snoo/test_event.py @@ -0,0 +1,45 @@ +"""Test Snoo Events.""" + +from unittest.mock import AsyncMock + +from freezegun import freeze_time + +from homeassistant.const import STATE_UNAVAILABLE +from homeassistant.core import HomeAssistant + +from . 
import async_init_integration, find_update_callback +from .const import MOCK_SNOO_DATA + + +@freeze_time("2025-01-01 12:00:00") +async def test_events(hass: HomeAssistant, bypass_api: AsyncMock) -> None: + """Test events and check test values are correctly set.""" + await async_init_integration(hass) + assert len(hass.states.async_all("event")) == 1 + assert hass.states.get("event.test_snoo_snoo_event").state == STATE_UNAVAILABLE + find_update_callback(bypass_api, "random_num")(MOCK_SNOO_DATA) + await hass.async_block_till_done() + assert len(hass.states.async_all("event")) == 1 + assert ( + hass.states.get("event.test_snoo_snoo_event").state + == "2025-01-01T12:00:00.000+00:00" + ) + + +@freeze_time("2025-01-01 12:00:00") +async def test_events_data_on_startup( + hass: HomeAssistant, bypass_api: AsyncMock +) -> None: + """Test events and check test values are correctly set if data exists on first update.""" + + def update_status(_): + find_update_callback(bypass_api, "random_num")(MOCK_SNOO_DATA) + + bypass_api.get_status.side_effect = update_status + await async_init_integration(hass) + await hass.async_block_till_done() + assert len(hass.states.async_all("event")) == 1 + assert ( + hass.states.get("event.test_snoo_snoo_event").state + == "2025-01-01T12:00:00.000+00:00" + ) diff --git a/tests/components/snoo/test_init.py b/tests/components/snoo/test_init.py index 06f420b6518..72c4b6fb8ab 100644 --- a/tests/components/snoo/test_init.py +++ b/tests/components/snoo/test_init.py @@ -1,14 +1,32 @@ """Test init for Snoo.""" +from unittest.mock import AsyncMock + +from python_snoo.exceptions import SnooAuthException + +from homeassistant.components.snoo import SnooDeviceError from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from . import async_init_integration -from .conftest import MockedSnoo -async def test_async_setup_entry(hass: HomeAssistant, bypass_api: MockedSnoo) -> None: +async def test_async_setup_entry(hass: HomeAssistant, bypass_api: AsyncMock) -> None: """Test a successful setup entry.""" entry = await async_init_integration(hass) assert len(hass.states.async_all("sensor")) == 2 assert entry.state == ConfigEntryState.LOADED + + +async def test_cannot_auth(hass: HomeAssistant, bypass_api: AsyncMock) -> None: + """Test that we are put into retry when we fail to auth.""" + bypass_api.authorize.side_effect = SnooAuthException + entry = await async_init_integration(hass) + assert entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_failed_devices(hass: HomeAssistant, bypass_api: AsyncMock) -> None: + """Test that we are put into retry when we fail to get devices.""" + bypass_api.get_devices.side_effect = SnooDeviceError + entry = await async_init_integration(hass) + assert entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/snoo/test_select.py b/tests/components/snoo/test_select.py new file mode 100644 index 00000000000..e00721b2ab8 --- /dev/null +++ b/tests/components/snoo/test_select.py @@ -0,0 +1,75 @@ +"""Test Snoo Selects.""" + +import copy +from unittest.mock import AsyncMock + +import pytest +from python_snoo.containers import SnooDevice, SnooLevels, SnooStates + +from homeassistant.components.select import SERVICE_SELECT_OPTION +from homeassistant.components.snoo.select import SnooCommandException +from homeassistant.const import STATE_UNAVAILABLE +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError + +from . 
import async_init_integration, find_update_callback +from .const import MOCK_SNOO_DATA + + +async def test_select(hass: HomeAssistant, bypass_api: AsyncMock) -> None: + """Test select and check test values are correctly set.""" + await async_init_integration(hass) + assert len(hass.states.async_all("select")) == 1 + assert hass.states.get("select.test_snoo_intensity").state == STATE_UNAVAILABLE + find_update_callback(bypass_api, "random_num")(MOCK_SNOO_DATA) + await hass.async_block_till_done() + assert len(hass.states.async_all("select")) == 1 + assert hass.states.get("select.test_snoo_intensity").state == "stop" + + +async def test_update_success(hass: HomeAssistant, bypass_api: AsyncMock) -> None: + """Test changing values for select entities.""" + await async_init_integration(hass) + + find_update_callback(bypass_api, "random_num")(MOCK_SNOO_DATA) + assert hass.states.get("select.test_snoo_intensity").state == "stop" + + async def update_level(device: SnooDevice, level: SnooStates, _hold: bool = False): + new_data = copy.deepcopy(MOCK_SNOO_DATA) + new_data.state_machine.level = SnooLevels(level.value) + find_update_callback(bypass_api, device.serialNumber)(new_data) + + bypass_api.set_level.side_effect = update_level + await hass.services.async_call( + "select", + SERVICE_SELECT_OPTION, + service_data={"option": "level1"}, + blocking=True, + target={"entity_id": "select.test_snoo_intensity"}, + ) + + bypass_api.set_level.assert_called_once() + assert hass.states.get("select.test_snoo_intensity").state == "level1" + + +async def test_update_failed(hass: HomeAssistant, bypass_api: AsyncMock) -> None: + """Test failing to change values for select entities.""" + await async_init_integration(hass) + + find_update_callback(bypass_api, "random_num")(MOCK_SNOO_DATA) + assert hass.states.get("select.test_snoo_intensity").state == "stop" + + bypass_api.set_level.side_effect = SnooCommandException + with pytest.raises( + HomeAssistantError, match="Error while updating Intensity to level1" + ): + await hass.services.async_call( + "select", + SERVICE_SELECT_OPTION, + service_data={"option": "level1"}, + blocking=True, + target={"entity_id": "select.test_snoo_intensity"}, + ) + + bypass_api.set_level.assert_called_once() + assert hass.states.get("select.test_snoo_intensity").state == "stop" diff --git a/tests/components/snoo/test_sensor.py b/tests/components/snoo/test_sensor.py new file mode 100644 index 00000000000..96a22e548b8 --- /dev/null +++ b/tests/components/snoo/test_sensor.py @@ -0,0 +1,22 @@ +"""Test Snoo Sensors.""" + +from unittest.mock import AsyncMock + +from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN +from homeassistant.core import HomeAssistant + +from .
+from .const import MOCK_SNOO_DATA
+
+
+async def test_sensors(hass: HomeAssistant, bypass_api: AsyncMock) -> None:
+    """Test sensors and check test values are correctly set."""
+    await async_init_integration(hass)
+    assert len(hass.states.async_all("sensor")) == 2
+    assert hass.states.get("sensor.test_snoo_state").state == STATE_UNAVAILABLE
+    assert hass.states.get("sensor.test_snoo_time_left").state == STATE_UNAVAILABLE
+    find_update_callback(bypass_api, "random_num")(MOCK_SNOO_DATA)
+    await hass.async_block_till_done()
+    assert len(hass.states.async_all("sensor")) == 2
+    assert hass.states.get("sensor.test_snoo_state").state == "stop"
+    assert hass.states.get("sensor.test_snoo_time_left").state == STATE_UNKNOWN
diff --git a/tests/components/snoo/test_switch.py b/tests/components/snoo/test_switch.py
new file mode 100644
index 00000000000..2343ff6c0d8
--- /dev/null
+++ b/tests/components/snoo/test_switch.py
@@ -0,0 +1,88 @@
+"""Test Snoo Switches."""
+
+import copy
+from unittest.mock import AsyncMock
+
+import pytest
+from python_snoo.containers import SnooDevice
+from python_snoo.exceptions import SnooCommandException
+
+from homeassistant.components.switch import (
+    SERVICE_TOGGLE,
+    SERVICE_TURN_OFF,
+    SERVICE_TURN_ON,
+)
+from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE
+from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import HomeAssistantError
+
+from . import async_init_integration, find_update_callback
+from .const import MOCK_SNOO_DATA
+
+
+async def test_switch(hass: HomeAssistant, bypass_api: AsyncMock) -> None:
+    """Test switch and check test values are correctly set."""
+    await async_init_integration(hass)
+    assert len(hass.states.async_all("switch")) == 2
+    assert hass.states.get("switch.test_snoo_level_lock").state == STATE_UNAVAILABLE
+    assert (
+        hass.states.get("switch.test_snoo_sleepytime_sounds").state == STATE_UNAVAILABLE
+    )
+    find_update_callback(bypass_api, "random_num")(MOCK_SNOO_DATA)
+    await hass.async_block_till_done()
+    assert len(hass.states.async_all("switch")) == 2
+    assert hass.states.get("switch.test_snoo_sleepytime_sounds").state == STATE_OFF
+    assert hass.states.get("switch.test_snoo_level_lock").state == STATE_OFF
+
+
+async def test_update_success(hass: HomeAssistant, bypass_api: AsyncMock) -> None:
+    """Test changing values for switch entities."""
+    await async_init_integration(hass)
+
+    find_update_callback(bypass_api, "random_num")(MOCK_SNOO_DATA)
+    assert hass.states.get("switch.test_snoo_sleepytime_sounds").state == STATE_OFF
+
+    async def set_sticky_white_noise(device: SnooDevice, state: bool):
+        new_data = copy.deepcopy(MOCK_SNOO_DATA)
+        new_data.state_machine.sticky_white_noise = "off" if not state else "on"
+        find_update_callback(bypass_api, device.serialNumber)(new_data)
+
+    bypass_api.set_sticky_white_noise.side_effect = set_sticky_white_noise
+    await hass.services.async_call(
+        "switch",
+        SERVICE_TOGGLE,
+        blocking=True,
+        target={"entity_id": "switch.test_snoo_sleepytime_sounds"},
+    )
+
+    bypass_api.set_sticky_white_noise.assert_called_once()
+    assert hass.states.get("switch.test_snoo_sleepytime_sounds").state == STATE_ON
+
+
+@pytest.mark.parametrize(
+    ("command", "error_str"),
+    [
+        (SERVICE_TURN_ON, "Turning Sleepytime sounds on failed"),
+        (SERVICE_TURN_OFF, "Turning Sleepytime sounds off failed"),
+    ],
+)
+async def test_update_failed(
+    hass: HomeAssistant, bypass_api: AsyncMock, command: str, error_str: str
+) -> None:
"""Test failing to change values for switch entities.""" + await async_init_integration(hass) + + find_update_callback(bypass_api, "random_num")(MOCK_SNOO_DATA) + assert hass.states.get("switch.test_snoo_sleepytime_sounds").state == STATE_OFF + + bypass_api.set_sticky_white_noise.side_effect = SnooCommandException + with pytest.raises(HomeAssistantError, match=error_str): + await hass.services.async_call( + "switch", + command, + blocking=True, + target={"entity_id": "switch.test_snoo_sleepytime_sounds"}, + ) + + assert bypass_api.set_level.assert_called_once + assert hass.states.get("switch.test_snoo_sleepytime_sounds").state == STATE_OFF diff --git a/tests/components/sonos/fixtures/sonos_favorites.json b/tests/components/sonos/fixtures/sonos_favorites.json index 21ee68f4872..40213ea8715 100644 --- a/tests/components/sonos/fixtures/sonos_favorites.json +++ b/tests/components/sonos/fixtures/sonos_favorites.json @@ -27,6 +27,7 @@ "title": "1984", "parent_id": "FV:2", "item_id": "FV:2/8", + "album_art_uri": "http://192.168.42.2:1400/getaa?u=x-file-cifs%3a%2f%2f192.168.42.2%2fmusic%2fiTunes%2520Music%2fAerosmith%2f1984&v=742", "resource_meta_data": "1984object.container.album.musicAlbumRINCON_AssociatedZPUDN", "resources": [ { @@ -34,5 +35,23 @@ "protocol_info": "a:b:c:d" } ] + }, + { + "title": "American Tall Tales", + "parent_id": "FV:2", + "item_id": "FV:2/66", + "restricted": false, + "resource_meta_data": "American Tall Talesobject.item.audioItem.audioBookSA_RINCON61191_X_#Svc6-0-Token", + "resources": [ + { + "uri": "x-rincon-cpcontainer:101340c8reftitle%C9F27_com?sid=239&flags=16584&sn=5", + "protocol_info": "x-rincon-cpcontainer:*:*:*" + } + ], + "desc": null, + "album_art_uri": "https://m.media-amazon.com/images/I/810lqLo5m0L._SL600_.jpg", + "type": "instantPlay", + "description": "Audible", + "favorite_nr": "0" } ] diff --git a/tests/components/sonos/snapshots/test_media_browser.ambr b/tests/components/sonos/snapshots/test_media_browser.ambr index ae8e813ae5d..24f08eaf95b 100644 --- a/tests/components/sonos/snapshots/test_media_browser.ambr +++ b/tests/components/sonos/snapshots/test_media_browser.ambr @@ -1,4 +1,99 @@ # serializer version: 1 +# name: test_browse_media_favorites[-favorites] + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': None, + 'media_class': 'album', + 'media_content_id': 'object.container.album.musicAlbum', + 'media_content_type': 'favorites_folder', + 'thumbnail': None, + 'title': 'Albums', + }), + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': None, + 'media_class': 'track', + 'media_content_id': 'object.item.audioItem.audioBook', + 'media_content_type': 'favorites_folder', + 'thumbnail': None, + 'title': 'Audio Book', + }), + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': None, + 'media_class': 'genre', + 'media_content_id': 'object.item.audioItem.audioBroadcast', + 'media_content_type': 'favorites_folder', + 'thumbnail': None, + 'title': 'Radio', + }), + ]), + 'children_media_class': 'directory', + 'media_class': 'directory', + 'media_content_id': '', + 'media_content_type': 'favorites', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'Favorites', + }) +# --- +# name: test_browse_media_favorites[object.container.album.musicAlbum-favorites_folder] + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': 
None, + 'media_class': 'album', + 'media_content_id': 'FV:2/8', + 'media_content_type': 'favorite_item_id', + 'thumbnail': 'http://192.168.42.2:1400/getaa?u=x-file-cifs://192.168.42.2/music/iTunes%20Music/Aerosmith/1984&v=742', + 'title': '1984', + }), + ]), + 'children_media_class': 'album', + 'media_class': 'directory', + 'media_content_id': '', + 'media_content_type': 'favorites', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'Albums', + }) +# --- +# name: test_browse_media_favorites[object.item.audioItem.audioBook-favorites_folder] + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': 'track', + 'media_content_id': 'FV:2/66', + 'media_content_type': 'favorite_item_id', + 'thumbnail': 'https://m.media-amazon.com/images/I/810lqLo5m0L._SL600_.jpg', + 'title': 'American Tall Tales', + }), + ]), + 'children_media_class': 'track', + 'media_class': 'directory', + 'media_content_id': '', + 'media_content_type': 'favorites', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'Audio Book', + }) +# --- # name: test_browse_media_library list([ dict({ diff --git a/tests/components/sonos/test_config_flow.py b/tests/components/sonos/test_config_flow.py index 70605092da1..8454b4ad673 100644 --- a/tests/components/sonos/test_config_flow.py +++ b/tests/components/sonos/test_config_flow.py @@ -123,6 +123,22 @@ async def test_zeroconf_form( assert len(mock_manager.mock_calls) == 2 +async def test_zeroconf_form_not_ipv4( + hass: HomeAssistant, zeroconf_payload: ZeroconfServiceInfo +) -> None: + """Test we pass Zeroconf discoveries to the manager.""" + mock_manager = hass.data[DATA_SONOS_DISCOVERY_MANAGER] = MagicMock() + zeroconf_payload.ip_address = ip_address("2001:db8:3333:4444:5555:6666:7777:8888") + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + data=zeroconf_payload, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "not_ipv4_address" + assert mock_manager.call_count == 0 + + async def test_ssdp_discovery(hass: HomeAssistant, soco) -> None: """Test that SSDP discoveries create a config flow.""" diff --git a/tests/components/sonos/test_init.py b/tests/components/sonos/test_init.py index a7ad2f4cb82..c6be606eb20 100644 --- a/tests/components/sonos/test_init.py +++ b/tests/components/sonos/test_init.py @@ -455,3 +455,32 @@ async def test_async_poll_manual_hosts_8( assert "media_player.garage" in entity_registry.entities assert "media_player.studio" in entity_registry.entities await hass.async_block_till_done(wait_background_tasks=True) + + +async def _setup_hass_ipv6_address_not_supported(hass: HomeAssistant): + await async_setup_component( + hass, + sonos.DOMAIN, + { + "sonos": { + "media_player": { + "interface_addr": "127.0.0.1", + "hosts": ["2001:db8:3333:4444:5555:6666:7777:8888"], + } + } + }, + ) + await hass.async_block_till_done() + + +async def test_ipv6_not_supported( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Tests that invalid ipv4 addresses do not generate stack dump.""" + with caplog.at_level(logging.DEBUG): + caplog.clear() + await _setup_hass_ipv6_address_not_supported(hass) + await hass.async_block_till_done() + assert "invalid ip_address received" in caplog.text + assert "2001:db8:3333:4444:5555:6666:7777:8888" in caplog.text diff --git a/tests/components/sonos/test_media_browser.py b/tests/components/sonos/test_media_browser.py 
index 6e03935f7f6..ce6e103be58 100644 --- a/tests/components/sonos/test_media_browser.py +++ b/tests/components/sonos/test_media_browser.py @@ -2,6 +2,7 @@ from functools import partial +import pytest from syrupy import SnapshotAssertion from homeassistant.components.media_player import BrowseMedia, MediaClass, MediaType @@ -176,3 +177,43 @@ async def test_browse_media_library_albums( assert response["success"] assert response["result"]["children"] == snapshot assert soco_mock.music_library.browse_by_idstring.call_count == 1 + + +@pytest.mark.parametrize( + ("media_content_id", "media_content_type"), + [ + ( + "", + "favorites", + ), + ( + "object.item.audioItem.audioBook", + "favorites_folder", + ), + ( + "object.container.album.musicAlbum", + "favorites_folder", + ), + ], +) +async def test_browse_media_favorites( + async_autosetup_sonos, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, + media_content_id, + media_content_type, +) -> None: + """Test the async_browse_media method.""" + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "media_player/browse_media", + "entity_id": "media_player.zone_a", + "media_content_id": media_content_id, + "media_content_type": media_content_type, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == snapshot diff --git a/tests/components/sonos/test_media_player.py b/tests/components/sonos/test_media_player.py index cec40c997a7..78d88a1ea98 100644 --- a/tests/components/sonos/test_media_player.py +++ b/tests/components/sonos/test_media_player.py @@ -692,6 +692,7 @@ async def test_select_source_line_in_tv( "play_uri": 1, "play_uri_uri": "x-sonosapi-radio:ST%3aetc", "play_uri_title": "James Taylor Radio", + "play_uri_meta": 'James Taylor Radioobject.item.audioItem.audioBroadcast.#stationSA_RINCON60423_X_#Svc60423-99999999-Token', }, ), ( @@ -700,6 +701,16 @@ async def test_select_source_line_in_tv( "play_uri": 1, "play_uri_uri": "x-sonosapi-hls:Api%3atune%3aliveAudio%3ajazzcafe%3aetc", "play_uri_title": "66 - Watercolors", + "play_uri_meta": '66 - Watercolorsobject.item.audioItem.audioBroadcastSA_RINCON9479_X_#Svc9479-99999999-Token', + }, + ), + ( + "American Tall Tales", + { + "play_uri": 1, + "play_uri_uri": "x-rincon-cpcontainer:101340c8reftitle%C9F27_com?sid=239&flags=16584&sn=5", + "play_uri_title": "American Tall Tales", + "play_uri_meta": 'American Tall Talesobject.item.audioItem.audioBookSA_RINCON61191_X_#Svc6-0-Token', }, ), ], @@ -726,6 +737,7 @@ async def test_select_source_play_uri( soco_mock.play_uri.assert_called_with( result.get("play_uri_uri"), title=result.get("play_uri_title"), + meta=result.get("play_uri_meta"), timeout=LONG_SERVICE_TIMEOUT, ) diff --git a/tests/components/squeezebox/conftest.py b/tests/components/squeezebox/conftest.py index 429c3b62087..769e611bf28 100644 --- a/tests/components/squeezebox/conftest.py +++ b/tests/components/squeezebox/conftest.py @@ -269,6 +269,7 @@ def mock_pysqueezebox_player(uuid: str) -> MagicMock: mock_player.title = None mock_player.image_url = None mock_player.model = "SqueezeLite" + mock_player.creator = "Ralph Irving & Adrian Smith" return mock_player @@ -309,7 +310,27 @@ async def configure_squeezebox_media_player_platform( ) -> None: """Configure a squeezebox config entry with appropriate mocks for media_player.""" with ( - patch("homeassistant.components.squeezebox.PLATFORMS", [Platform.MEDIA_PLAYER]), + patch( + "homeassistant.components.squeezebox.PLATFORMS", + 
[Platform.MEDIA_PLAYER], + ), + patch("homeassistant.components.squeezebox.Server", return_value=lms), + ): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done(wait_background_tasks=True) + + +async def configure_squeezebox_media_player_button_platform( + hass: HomeAssistant, + config_entry: MockConfigEntry, + lms: MagicMock, +) -> None: + """Configure a squeezebox config entry with appropriate mocks for media_player.""" + with ( + patch( + "homeassistant.components.squeezebox.PLATFORMS", + [Platform.BUTTON], + ), patch("homeassistant.components.squeezebox.Server", return_value=lms), ): await hass.config_entries.async_setup(config_entry.entry_id) @@ -325,6 +346,15 @@ async def configured_player( return (await lms.async_get_players())[0] +@pytest.fixture +async def configured_player_with_button( + hass: HomeAssistant, config_entry: MockConfigEntry, lms: MagicMock +) -> MagicMock: + """Fixture mocking calls to pysqueezebox Player from a configured squeezebox.""" + await configure_squeezebox_media_player_button_platform(hass, config_entry, lms) + return (await lms.async_get_players())[0] + + @pytest.fixture async def configured_players( hass: HomeAssistant, config_entry: MockConfigEntry, lms_factory: MagicMock diff --git a/tests/components/squeezebox/snapshots/test_media_player.ambr b/tests/components/squeezebox/snapshots/test_media_player.ambr index 34d6ae16af8..c0633035a84 100644 --- a/tests/components/squeezebox/snapshots/test_media_player.ambr +++ b/tests/components/squeezebox/snapshots/test_media_player.ambr @@ -24,7 +24,7 @@ 'is_new': False, 'labels': set({ }), - 'manufacturer': 'Ralph Irving', + 'manufacturer': 'Ralph Irving & Adrian Smith', 'model': 'SqueezeLite', 'model_id': None, 'name': 'Test Player', diff --git a/tests/components/squeezebox/test_button.py b/tests/components/squeezebox/test_button.py new file mode 100644 index 00000000000..16ced65be61 --- /dev/null +++ b/tests/components/squeezebox/test_button.py @@ -0,0 +1,23 @@ +"""Tests for the squeezebox button component.""" + +from unittest.mock import MagicMock + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant + + +async def test_squeezebox_press( + hass: HomeAssistant, configured_player_with_button: MagicMock +) -> None: + """Test press service call.""" + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.test_player_preset_1"}, + blocking=True, + ) + + configured_player_with_button.async_query.assert_called_with( + "button", "preset_1.single" + ) diff --git a/tests/components/stookwijzer/conftest.py b/tests/components/stookwijzer/conftest.py index 40582dc4be3..dd7f2a7bbc3 100644 --- a/tests/components/stookwijzer/conftest.py +++ b/tests/components/stookwijzer/conftest.py @@ -1,6 +1,7 @@ """Fixtures for Stookwijzer integration tests.""" from collections.abc import Generator +from typing import Required, TypedDict from unittest.mock import AsyncMock, MagicMock, patch import pytest @@ -12,6 +13,14 @@ from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry +class Forecast(TypedDict): + """Typed Stookwijzer forecast dict.""" + + datetime: Required[str] + advice: str | None + final: bool | None + + @pytest.fixture def mock_config_entry() -> MockConfigEntry: """Return the default mocked config entry.""" @@ -80,6 +89,28 @@ def mock_stookwijzer() -> Generator[MagicMock]: 
client.windspeed_ms = 2.5 client.windspeed_bft = 2 client.advice = "code_yellow" + client.async_get_forecast.return_value = ( + Forecast( + datetime="2025-02-12T17:00:00+01:00", + advice="code_yellow", + final=True, + ), + Forecast( + datetime="2025-02-12T23:00:00+01:00", + advice="code_yellow", + final=True, + ), + Forecast( + datetime="2025-02-13T05:00:00+01:00", + advice="code_orange", + final=False, + ), + Forecast( + datetime="2025-02-13T11:00:00+01:00", + advice="code_orange", + final=False, + ), + ) yield stookwijzer_mock diff --git a/tests/components/stookwijzer/snapshots/test_diagnostics.ambr b/tests/components/stookwijzer/snapshots/test_diagnostics.ambr index e2535d54466..452b5bd0a30 100644 --- a/tests/components/stookwijzer/snapshots/test_diagnostics.ambr +++ b/tests/components/stookwijzer/snapshots/test_diagnostics.ambr @@ -3,6 +3,28 @@ dict({ 'advice': 'code_yellow', 'air_quality_index': 2, + 'forecast': list([ + dict({ + 'advice': 'code_yellow', + 'datetime': '2025-02-12T17:00:00+01:00', + 'final': True, + }), + dict({ + 'advice': 'code_yellow', + 'datetime': '2025-02-12T23:00:00+01:00', + 'final': True, + }), + dict({ + 'advice': 'code_orange', + 'datetime': '2025-02-13T05:00:00+01:00', + 'final': False, + }), + dict({ + 'advice': 'code_orange', + 'datetime': '2025-02-13T11:00:00+01:00', + 'final': False, + }), + ]), 'windspeed_ms': 2.5, }) # --- diff --git a/tests/components/stream/conftest.py b/tests/components/stream/conftest.py index 39e4de13fed..296505271c0 100644 --- a/tests/components/stream/conftest.py +++ b/tests/components/stream/conftest.py @@ -27,6 +27,8 @@ from homeassistant.components.stream.worker import StreamState from .common import generate_h264_video, stream_teardown +_LOGGER = logging.getLogger(__name__) + TEST_TIMEOUT = 7.0 # Lower than 9s home assistant timeout @@ -44,7 +46,7 @@ class WorkerSync: def resume(self): """Allow the worker thread to finalize the stream.""" - logging.debug("waking blocked worker") + _LOGGER.debug("waking blocked worker") self._event.set() def blocking_discontinuity(self, stream_state: StreamState): @@ -52,7 +54,7 @@ class WorkerSync: # Worker is ending the stream, which clears all output buffers. # Block the worker thread until the test has a chance to verify # the segments under test. 
- logging.debug("blocking worker") + _LOGGER.debug("blocking worker") if self._event: self._event.wait() diff --git a/tests/components/stream/test_ll_hls.py b/tests/components/stream/test_ll_hls.py index 443103fdf92..1eb638237af 100644 --- a/tests/components/stream/test_ll_hls.py +++ b/tests/components/stream/test_ll_hls.py @@ -202,7 +202,7 @@ async def test_ll_hls_stream( datetime_re = re.compile(r"#EXT-X-PROGRAM-DATE-TIME:(?P.+)") inf_re = re.compile(r"#EXTINF:(?P[0-9]{1,}.[0-9]{3,}),") # keep track of which tests were done (indexed by re) - tested = {regex: False for regex in (part_re, datetime_re, inf_re)} + tested = dict.fromkeys((part_re, datetime_re, inf_re), False) # keep track of times and durations along playlist for checking consistency part_durations = [] segment_duration = 0 diff --git a/tests/components/stream/test_worker.py b/tests/components/stream/test_worker.py index 2be972cc6a2..276b4109652 100644 --- a/tests/components/stream/test_worker.py +++ b/tests/components/stream/test_worker.py @@ -56,6 +56,8 @@ from .test_ll_hls import TEST_PART_DURATION from tests.components.camera.common import EMPTY_8_6_JPEG, mock_turbo_jpeg +_LOGGER = logging.getLogger(__name__) + STREAM_SOURCE = "some-stream-source" # Formats here are arbitrary, not exercised by tests AUDIO_STREAM_FORMAT = "mp3" @@ -229,7 +231,7 @@ class FakePyAvBuffer: return def mux(self, packet): - logging.debug("Muxed packet: %s", packet) + _LOGGER.debug("Muxed packet: %s", packet) self.capture_packets.append(packet) def __str__(self) -> str: diff --git a/tests/components/switchbot/__init__.py b/tests/components/switchbot/__init__.py index 4d6794b962f..d123c93a873 100644 --- a/tests/components/switchbot/__init__.py +++ b/tests/components/switchbot/__init__.py @@ -294,3 +294,28 @@ REMOTE_SERVICE_INFO = BluetoothServiceInfoBleak( connectable=False, tx_power=-127, ) + + +WOHUB2_SERVICE_INFO = BluetoothServiceInfoBleak( + name="WoHub2", + manufacturer_data={ + 2409: b"\xe7\x06\x1dx\x99y\x00\xffg\xe2\xbf]\x84\x04\x9a,\x00", + }, + service_data={"0000fd3d-0000-1000-8000-00805f9b34fb": b"v\x00"}, + service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"], + address="AA:BB:CC:DD:EE:FF", + rssi=-60, + source="local", + advertisement=generate_advertisement_data( + local_name="WoHub2", + manufacturer_data={ + 2409: b"\xe7\x06\x1dx\x99y\x00\xffg\xe2\xbf]\x84\x04\x9a,\x00", + }, + service_data={"0000fd3d-0000-1000-8000-00805f9b34fb": b"v\x00"}, + service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"], + ), + device=generate_ble_device("AA:BB:CC:DD:EE:FF", "WoHub2"), + time=0, + connectable=True, + tx_power=-127, +) diff --git a/tests/components/switchbot/test_sensor.py b/tests/components/switchbot/test_sensor.py index 6a7111a054e..5fd270b3393 100644 --- a/tests/components/switchbot/test_sensor.py +++ b/tests/components/switchbot/test_sensor.py @@ -25,6 +25,7 @@ from . 
import ( LEAK_SERVICE_INFO, REMOTE_SERVICE_INFO, WOHAND_SERVICE_INFO, + WOHUB2_SERVICE_INFO, WOMETERTHPC_SERVICE_INFO, WORELAY_SWITCH_1PM_SERVICE_INFO, ) @@ -234,3 +235,61 @@ async def test_remote(hass: HomeAssistant) -> None: assert await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_hub2_sensor(hass: HomeAssistant) -> None: + """Test setting up creates the sensor for WoHub2.""" + await async_setup_component(hass, DOMAIN, {}) + inject_bluetooth_service_info(hass, WOHUB2_SERVICE_INFO) + + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_ADDRESS: "AA:BB:CC:DD:EE:FF", + CONF_NAME: "test-name", + CONF_SENSOR_TYPE: "hub2", + }, + unique_id="aabbccddeeff", + ) + entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert len(hass.states.async_all("sensor")) == 5 + + temperature_sensor = hass.states.get("sensor.test_name_temperature") + temperature_sensor_attrs = temperature_sensor.attributes + assert temperature_sensor.state == "26.4" + assert temperature_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Temperature" + assert temperature_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "°C" + assert temperature_sensor_attrs[ATTR_STATE_CLASS] == "measurement" + + humidity_sensor = hass.states.get("sensor.test_name_humidity") + humidity_sensor_attrs = humidity_sensor.attributes + assert humidity_sensor.state == "44" + assert humidity_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Humidity" + assert humidity_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "%" + assert humidity_sensor_attrs[ATTR_STATE_CLASS] == "measurement" + + light_level_sensor = hass.states.get("sensor.test_name_light_level") + light_level_sensor_attrs = light_level_sensor.attributes + assert light_level_sensor.state == "4" + assert light_level_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Light level" + assert light_level_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "Level" + + light_level_sensor = hass.states.get("sensor.test_name_illuminance") + light_level_sensor_attrs = light_level_sensor.attributes + assert light_level_sensor.state == "30" + assert light_level_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Illuminance" + assert light_level_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "lx" + + rssi_sensor = hass.states.get("sensor.test_name_bluetooth_signal") + rssi_sensor_attrs = rssi_sensor.attributes + assert rssi_sensor.state == "-60" + assert rssi_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Bluetooth signal" + assert rssi_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "dBm" + + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/synology_dsm/test_backup.py b/tests/components/synology_dsm/test_backup.py index 24cfe29f52b..db0062b45bf 100644 --- a/tests/components/synology_dsm/test_backup.py +++ b/tests/components/synology_dsm/test_backup.py @@ -4,9 +4,13 @@ from io import StringIO from typing import Any from unittest.mock import ANY, AsyncMock, MagicMock, Mock, patch +from freezegun.api import FrozenDateTimeFactory import pytest from synology_dsm.api.file_station.models import SynoFileFile, SynoFileSharedFolder -from synology_dsm.exceptions import SynologyDSMAPIErrorException +from synology_dsm.exceptions import ( + SynologyDSMAPIErrorException, + SynologyDSMRequestException, +) from homeassistant.components.backup import ( DOMAIN as BACKUP_DOMAIN, @@ -279,6 
+283,50 @@ async def test_agents_on_unload(
     }
+
+async def test_agents_on_changed_update_success(
+    hass: HomeAssistant,
+    setup_dsm_with_filestation: MagicMock,
+    hass_ws_client: WebSocketGenerator,
+    freezer: FrozenDateTimeFactory,
+) -> None:
+    """Test backup agent on changed update success of coordinator."""
+    client = await hass_ws_client(hass)
+
+    # config entry is loaded
+    await client.send_json_auto_id({"type": "backup/agents/info"})
+    response = await client.receive_json()
+    assert response["success"]
+    assert len(response["result"]["agents"]) == 2
+
+    # coordinator update was successful
+    freezer.tick(910)  # 15 min interval + 10s
+    await hass.async_block_till_done(wait_background_tasks=True)
+    await client.send_json_auto_id({"type": "backup/agents/info"})
+    response = await client.receive_json()
+    assert response["success"]
+    assert len(response["result"]["agents"]) == 2
+
+    # coordinator update was unsuccessful
+    setup_dsm_with_filestation.update.side_effect = SynologyDSMRequestException(
+        OSError()
+    )
+    freezer.tick(910)
+    await hass.async_block_till_done(wait_background_tasks=True)
+    await client.send_json_auto_id({"type": "backup/agents/info"})
+    response = await client.receive_json()
+    assert response["success"]
+    assert len(response["result"]["agents"]) == 1
+
+    # coordinator update was successful again
+    setup_dsm_with_filestation.update.side_effect = None
+    freezer.tick(910)
+    await hass.async_block_till_done(wait_background_tasks=True)
+    await client.send_json_auto_id({"type": "backup/agents/info"})
+    response = await client.receive_json()
+    assert response["success"]
+    assert len(response["result"]["agents"]) == 2
+
+
 async def test_agents_list_backups(
     hass: HomeAssistant,
     setup_dsm_with_filestation: MagicMock,
@@ -338,7 +386,7 @@ async def test_agents_list_backups_error(
         "backups": [],
         "last_attempted_automatic_backup": None,
         "last_completed_automatic_backup": None,
-        "last_non_idle_event": None,
+        "last_action_event": None,
         "next_automatic_backup": None,
         "next_automatic_backup_additional": False,
         "state": "idle",
diff --git a/tests/components/systemmonitor/snapshots/test_sensor.ambr b/tests/components/systemmonitor/snapshots/test_sensor.ambr
index 1ee9067a528..8108e4777c8 100644
--- a/tests/components/systemmonitor/snapshots/test_sensor.ambr
+++ b/tests/components/systemmonitor/snapshots/test_sensor.ambr
@@ -114,34 +114,34 @@
 # name: test_sensor[System Monitor Last boot - state]
   '2024-02-24T15:00:00+00:00'
 # ---
-# name: test_sensor[System Monitor Load (15m) - attributes]
+# name: test_sensor[System Monitor Load (15 min) - attributes]
   ReadOnlyDict({
-    'friendly_name': 'System Monitor Load (15m)',
+    'friendly_name': 'System Monitor Load (15 min)',
     'icon': 'mdi:cpu-64-bit',
     'state_class': ,
   })
 # ---
-# name: test_sensor[System Monitor Load (15m) - state]
+# name: test_sensor[System Monitor Load (15 min) - state]
   '3'
 # ---
-# name: test_sensor[System Monitor Load (1m) - attributes]
+# name: test_sensor[System Monitor Load (1 min) - attributes]
   ReadOnlyDict({
-    'friendly_name': 'System Monitor Load (1m)',
+    'friendly_name': 'System Monitor Load (1 min)',
     'icon': 'mdi:cpu-64-bit',
     'state_class': ,
   })
 # ---
-# name: test_sensor[System Monitor Load (1m) - state]
+# name: test_sensor[System Monitor Load (1 min) - state]
   '1'
 # ---
-# name: test_sensor[System Monitor Load (5m) - attributes]
+# name: test_sensor[System Monitor Load (5 min) - attributes]
   ReadOnlyDict({
-    'friendly_name': 'System Monitor Load (5m)',
+    'friendly_name': 'System Monitor Load (5 min)',
     'icon': 
'mdi:cpu-64-bit', 'state_class': , }) # --- -# name: test_sensor[System Monitor Load (5m) - state] +# name: test_sensor[System Monitor Load (5 min) - state] '2' # --- # name: test_sensor[System Monitor Memory free - attributes] diff --git a/tests/components/tado/__init__.py b/tests/components/tado/__init__.py index 11d199f01a1..e6b6257e6ea 100644 --- a/tests/components/tado/__init__.py +++ b/tests/components/tado/__init__.py @@ -1 +1 @@ -"""Tests for the tado integration.""" +"""Tests for the Tado integration.""" diff --git a/tests/components/tado/conftest.py b/tests/components/tado/conftest.py new file mode 100644 index 00000000000..1aa62b218a2 --- /dev/null +++ b/tests/components/tado/conftest.py @@ -0,0 +1,50 @@ +"""Fixtures for Tado tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch + +from PyTado.http import DeviceActivationStatus +import pytest + +from homeassistant.components.tado import CONF_REFRESH_TOKEN, DOMAIN + +from tests.common import MockConfigEntry, load_json_object_fixture + + +@pytest.fixture +def mock_tado_api() -> Generator[MagicMock]: + """Mock the Tado API.""" + with ( + patch("homeassistant.components.tado.Tado") as mock_tado, + patch("homeassistant.components.tado.config_flow.Tado", new=mock_tado), + ): + client = mock_tado.return_value + client.device_verification_url.return_value = ( + "https://login.tado.com/oauth2/device?user_code=TEST" + ) + client.device_activation_status.return_value = DeviceActivationStatus.COMPLETED + client.get_me.return_value = load_json_object_fixture("me.json", DOMAIN) + client.get_refresh_token.return_value = "refresh" + yield client + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Mock the setup entry.""" + with patch( + "homeassistant.components.tado.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock a config entry.""" + return MockConfigEntry( + domain=DOMAIN, + data={ + CONF_REFRESH_TOKEN: "refresh", + }, + unique_id="1", + version=2, + ) diff --git a/tests/components/tado/fixtures/device_authorize.json b/tests/components/tado/fixtures/device_authorize.json new file mode 100644 index 00000000000..aacd171fafd --- /dev/null +++ b/tests/components/tado/fixtures/device_authorize.json @@ -0,0 +1,8 @@ +{ + "device_code": "ABCD", + "expires_in": 300, + "interval": 5, + "user_code": "TEST", + "verification_uri": "https://login.tado.com/oauth2/device", + "verification_uri_complete": "https://login.tado.com/oauth2/device?user_code=TEST" +} diff --git a/tests/components/tado/test_config_flow.py b/tests/components/tado/test_config_flow.py index 19acb0aecbd..f7418309d46 100644 --- a/tests/components/tado/test_config_flow.py +++ b/tests/components/tado/test_config_flow.py @@ -1,20 +1,20 @@ """Test the Tado config flow.""" -from http import HTTPStatus from ipaddress import ip_address -from unittest.mock import MagicMock, patch +import threading +from unittest.mock import AsyncMock, MagicMock, patch -import PyTado +from PyTado.http import DeviceActivationStatus import pytest -import requests -from homeassistant import config_entries -from homeassistant.components.tado.config_flow import NoHomes +from homeassistant.components.tado.config_flow import TadoException from homeassistant.components.tado.const import ( CONF_FALLBACK, + CONF_REFRESH_TOKEN, CONST_OVERLAY_TADO_DEFAULT, DOMAIN, ) +from homeassistant.config_entries import SOURCE_HOMEKIT, SOURCE_USER 
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -26,92 +26,186 @@ from homeassistant.helpers.service_info.zeroconf import ( from tests.common import MockConfigEntry -def _get_mock_tado_api(get_me=None) -> MagicMock: - mock_tado = MagicMock() - if isinstance(get_me, Exception): - type(mock_tado).get_me = MagicMock(side_effect=get_me) - else: - type(mock_tado).get_me = MagicMock(return_value=get_me) - return mock_tado +async def test_full_flow( + hass: HomeAssistant, + mock_tado_api: MagicMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test the full flow of the config flow.""" + + event = threading.Event() + + def mock_tado_api_device_activation() -> None: + # Simulate the device activation process + event.wait(timeout=5) + + mock_tado_api.device_activation = mock_tado_api_device_activation + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "user" + + event.set() + await hass.async_block_till_done() + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "home name" + assert result["data"] == {CONF_REFRESH_TOKEN: "refresh"} + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_full_flow_reauth( + hass: HomeAssistant, + mock_tado_api: MagicMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test the full flow of the config when reauthticating.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id="ABC-123-DEF-456", + data={CONF_REFRESH_TOKEN: "totally_refresh_for_reauth"}, + ) + entry.add_to_hass(hass) + + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + # The no user input + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + event = threading.Event() + + def mock_tado_api_device_activation() -> None: + # Simulate the device activation process + event.wait(timeout=5) + + mock_tado_api.device_activation = mock_tado_api_device_activation + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "user" + + event.set() + await hass.async_block_till_done() + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "home name" + assert result["data"] == {CONF_REFRESH_TOKEN: "refresh"} + + +async def test_auth_timeout( + hass: HomeAssistant, + mock_tado_api: MagicMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test the auth timeout.""" + mock_tado_api.device_activation_status.return_value = DeviceActivationStatus.PENDING + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.SHOW_PROGRESS_DONE + assert result["step_id"] == "timeout" + + mock_tado_api.device_activation_status.return_value = ( + DeviceActivationStatus.COMPLETED + ) + + result = await 
hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "timeout" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "home name" + assert result["data"] == {CONF_REFRESH_TOKEN: "refresh"} + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_no_homes(hass: HomeAssistant, mock_tado_api: MagicMock) -> None: + """Test the full flow of the config flow.""" + mock_tado_api.get_me.return_value["homes"] = [] + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.SHOW_PROGRESS_DONE + assert result["step_id"] == "finish_login" + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "no_homes" + + +async def test_tado_creation(hass: HomeAssistant) -> None: + """Test we handle Form Exceptions.""" + + with patch( + "homeassistant.components.tado.config_flow.Tado", + side_effect=TadoException("Test exception"), + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" @pytest.mark.parametrize( ("exception", "error"), [ - (KeyError, "invalid_auth"), - (RuntimeError, "cannot_connect"), - (ValueError, "unknown"), + (Exception, "timeout"), + (TadoException, "timeout"), ], ) -async def test_form_exceptions( - hass: HomeAssistant, exception: Exception, error: str +async def test_wait_for_login_exception( + hass: HomeAssistant, + mock_tado_api: MagicMock, + exception: Exception, + error: str, ) -> None: - """Test we handle Form Exceptions.""" + """Test that an exception in wait for login is handled properly.""" + mock_tado_api.device_activation.side_effect = exception + result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) - - with patch( - "homeassistant.components.tado.config_flow.Tado", - side_effect=exception, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"username": "test-username", "password": "test-password"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": error} - - # Test a retry to recover, upon failure - mock_tado_api = _get_mock_tado_api(get_me={"homes": [{"id": 1, "name": "myhome"}]}) - - with ( - patch( - "homeassistant.components.tado.config_flow.Tado", - return_value=mock_tado_api, - ), - patch( - "homeassistant.components.tado.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"username": "test-username", "password": "test-password"}, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "myhome" - assert result["data"] == { - "username": "test-username", - "password": "test-password", - } - assert len(mock_setup_entry.mock_calls) == 1 + # @joostlek: I think the timeout step is not rightfully named, but heck, it works + assert result["type"] is FlowResultType.SHOW_PROGRESS_DONE + assert result["step_id"] == error -async def test_options_flow(hass: HomeAssistant) -> None: +async def 
test_options_flow( + hass: HomeAssistant, + mock_tado_api: MagicMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: """Test config flow options.""" - entry = MockConfigEntry(domain=DOMAIN, data={"username": "test-username"}) - entry.add_to_hass(hass) + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - with patch( - "homeassistant.components.tado.async_setup_entry", - return_value=True, - ): - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - result = await hass.config_entries.options.async_init( - entry.entry_id, context={"source": config_entries.SOURCE_USER} - ) + result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "init" @@ -119,125 +213,17 @@ async def test_options_flow(hass: HomeAssistant) -> None: result["flow_id"], {CONF_FALLBACK: CONST_OVERLAY_TADO_DEFAULT}, ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == {CONF_FALLBACK: CONST_OVERLAY_TADO_DEFAULT} -async def test_create_entry(hass: HomeAssistant) -> None: - """Test we can setup though the user path.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - mock_tado_api = _get_mock_tado_api(get_me={"homes": [{"id": 1, "name": "myhome"}]}) - - with ( - patch( - "homeassistant.components.tado.config_flow.Tado", - return_value=mock_tado_api, - ), - patch( - "homeassistant.components.tado.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"username": "test-username", "password": "test-password"}, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "myhome" - assert result["data"] == { - "username": "test-username", - "password": "test-password", - } - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_form_invalid_auth(hass: HomeAssistant) -> None: - """Test we handle invalid auth.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - response_mock = MagicMock() - type(response_mock).status_code = HTTPStatus.UNAUTHORIZED - mock_tado_api = _get_mock_tado_api( - get_me=requests.HTTPError(response=response_mock) - ) - - with patch( - "homeassistant.components.tado.config_flow.Tado", - return_value=mock_tado_api, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"username": "test-username", "password": "test-password"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "invalid_auth"} - - -async def test_form_cannot_connect(hass: HomeAssistant) -> None: - """Test we handle cannot connect error.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - response_mock = MagicMock() - type(response_mock).status_code = HTTPStatus.INTERNAL_SERVER_ERROR - mock_tado_api = 
_get_mock_tado_api( - get_me=requests.HTTPError(response=response_mock) - ) - - with patch( - "homeassistant.components.tado.config_flow.Tado", - return_value=mock_tado_api, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"username": "test-username", "password": "test-password"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "cannot_connect"} - - -async def test_no_homes(hass: HomeAssistant) -> None: - """Test we handle no homes error.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - mock_tado_api = _get_mock_tado_api(get_me={"homes": []}) - - with patch( - "homeassistant.components.tado.config_flow.Tado", - return_value=mock_tado_api, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"username": "test-username", "password": "test-password"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "no_homes"} - - -async def test_form_homekit(hass: HomeAssistant) -> None: +async def test_homekit(hass: HomeAssistant, mock_tado_api: MagicMock) -> None: """Test that we abort from homekit if tado is already setup.""" result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": config_entries.SOURCE_HOMEKIT}, + context={"source": SOURCE_HOMEKIT}, data=ZeroconfServiceInfo( ip_address=ip_address("127.0.0.1"), ip_addresses=[ip_address("127.0.0.1")], @@ -248,8 +234,7 @@ async def test_form_homekit(hass: HomeAssistant) -> None: type="mock_type", ), ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} + assert result["type"] is FlowResultType.SHOW_PROGRESS_DONE flow = next( flow for flow in hass.config_entries.flow.async_progress() @@ -264,7 +249,7 @@ async def test_form_homekit(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": config_entries.SOURCE_HOMEKIT}, + context={"source": SOURCE_HOMEKIT}, data=ZeroconfServiceInfo( ip_address=ip_address("127.0.0.1"), ip_addresses=[ip_address("127.0.0.1")], @@ -276,77 +261,3 @@ async def test_form_homekit(hass: HomeAssistant) -> None: ), ) assert result["type"] is FlowResultType.ABORT - - -@pytest.mark.parametrize( - ("exception", "error"), - [ - (PyTado.exceptions.TadoWrongCredentialsException, "invalid_auth"), - (RuntimeError, "cannot_connect"), - (NoHomes, "no_homes"), - (ValueError, "unknown"), - ], -) -async def test_reconfigure_flow( - hass: HomeAssistant, exception: Exception, error: str -) -> None: - """Test re-configuration flow.""" - entry = MockConfigEntry( - domain=DOMAIN, - data={ - "username": "test-username", - "password": "test-password", - "home_id": 1, - }, - unique_id="unique_id", - ) - entry.add_to_hass(hass) - - result = await entry.start_reconfigure_flow(hass) - - assert result["type"] is FlowResultType.FORM - - with patch( - "homeassistant.components.tado.config_flow.Tado", - side_effect=exception, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_PASSWORD: "test-password", - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": error} - - mock_tado_api = _get_mock_tado_api(get_me={"homes": [{"id": 1, "name": "myhome"}]}) - with ( - patch( - "homeassistant.components.tado.config_flow.Tado", - return_value=mock_tado_api, - ), - patch( - "homeassistant.components.tado.async_setup_entry", 
- return_value=True, - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_PASSWORD: "test-password", - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reconfigure_successful" - entry = hass.config_entries.async_get_entry(entry.entry_id) - assert entry - assert entry.title == "Mock Title" - assert entry.data == { - "username": "test-username", - "password": "test-password", - "home_id": 1, - } diff --git a/tests/components/tado/test_helper.py b/tests/components/tado/test_helper.py index da959c2124a..7f798e3797c 100644 --- a/tests/components/tado/test_helper.py +++ b/tests/components/tado/test_helper.py @@ -5,7 +5,7 @@ from unittest.mock import MagicMock, patch from PyTado.interface import Tado import pytest -from homeassistant.components.tado import TadoDataUpdateCoordinator +from homeassistant.components.tado import CONF_REFRESH_TOKEN, TadoDataUpdateCoordinator from homeassistant.components.tado.const import ( CONST_OVERLAY_MANUAL, CONST_OVERLAY_TADO_DEFAULT, @@ -28,13 +28,13 @@ def entry(request: pytest.FixtureRequest) -> MockConfigEntry: request.param if hasattr(request, "param") else CONST_OVERLAY_TADO_DEFAULT ) return MockConfigEntry( - version=1, - minor_version=1, + version=2, domain=DOMAIN, title="Tado", data={ CONF_USERNAME: "test-username", CONF_PASSWORD: "test-password", + CONF_REFRESH_TOKEN: "test-refresh", }, options={ "fallback": fallback, diff --git a/tests/components/tado/test_init.py b/tests/components/tado/test_init.py new file mode 100644 index 00000000000..2f2ccacf3c0 --- /dev/null +++ b/tests/components/tado/test_init.py @@ -0,0 +1,30 @@ +"""Test the Tado integration.""" + +from homeassistant.components.tado import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def test_v1_migration(hass: HomeAssistant) -> None: + """Test migration from v1 to v2 config entry.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_USERNAME: "test", + CONF_PASSWORD: "test", + }, + unique_id="1", + version=1, + ) + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert entry.version == 2 + assert CONF_USERNAME not in entry.data + assert CONF_PASSWORD not in entry.data + + assert entry.state is ConfigEntryState.SETUP_ERROR + assert len(hass.config_entries.flow.async_progress()) == 1 diff --git a/tests/components/tado/util.py b/tests/components/tado/util.py index 5bf87dbed33..6fd333dff51 100644 --- a/tests/components/tado/util.py +++ b/tests/components/tado/util.py @@ -2,8 +2,7 @@ import requests_mock -from homeassistant.components.tado import DOMAIN -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.components.tado import CONF_REFRESH_TOKEN, DOMAIN from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, load_fixture @@ -178,9 +177,16 @@ async def async_init_integration( "https://my.tado.com/api/v2/homes/1/zones/1/state", text=load_fixture(zone_1_state_fixture), ) + m.post( + "https://login.tado.com/oauth2/token", + text=load_fixture(token_fixture), + ) entry = MockConfigEntry( domain=DOMAIN, - data={CONF_USERNAME: "mock", CONF_PASSWORD: "mock"}, + version=2, + data={ + CONF_REFRESH_TOKEN: "mock-token", + }, options={"fallback": "NEXT_TIME_BLOCK"}, ) 
entry.add_to_hass(hass) diff --git a/tests/components/tankerkoenig/const.py b/tests/components/tankerkoenig/const.py index 2c28753a7f3..9a2ecb3a2be 100644 --- a/tests/components/tankerkoenig/const.py +++ b/tests/components/tankerkoenig/const.py @@ -2,7 +2,7 @@ from aiotankerkoenig import PriceInfo, Station, Status -from homeassistant.components.tankerkoenig.const import CONF_FUEL_TYPES, CONF_STATIONS +from homeassistant.components.tankerkoenig.const import CONF_STATIONS from homeassistant.const import ( CONF_API_KEY, CONF_LATITUDE, @@ -98,7 +98,6 @@ PRICES_MISSING_FUELTYPE = { CONFIG_DATA = { CONF_NAME: "Home", CONF_API_KEY: "269534f6-xxxx-xxxx-xxxx-yyyyzzzzxxxx", - CONF_FUEL_TYPES: ["e5"], CONF_LOCATION: {CONF_LATITUDE: 51.0, CONF_LONGITUDE: 13.0}, CONF_RADIUS: 2.0, CONF_STATIONS: [ diff --git a/tests/components/tankerkoenig/snapshots/test_diagnostics.ambr b/tests/components/tankerkoenig/snapshots/test_diagnostics.ambr index b5b33d7c246..71d9d9c75f8 100644 --- a/tests/components/tankerkoenig/snapshots/test_diagnostics.ambr +++ b/tests/components/tankerkoenig/snapshots/test_diagnostics.ambr @@ -12,9 +12,6 @@ 'entry': dict({ 'data': dict({ 'api_key': '**REDACTED**', - 'fuel_types': list([ - 'e5', - ]), 'location': dict({ 'latitude': '**REDACTED**', 'longitude': '**REDACTED**', diff --git a/tests/components/tankerkoenig/test_config_flow.py b/tests/components/tankerkoenig/test_config_flow.py index bb1e943bbb9..967470c2c16 100644 --- a/tests/components/tankerkoenig/test_config_flow.py +++ b/tests/components/tankerkoenig/test_config_flow.py @@ -4,11 +4,7 @@ from unittest.mock import AsyncMock, patch from aiotankerkoenig.exceptions import TankerkoenigInvalidKeyError -from homeassistant.components.tankerkoenig.const import ( - CONF_FUEL_TYPES, - CONF_STATIONS, - DOMAIN, -) +from homeassistant.components.tankerkoenig.const import CONF_STATIONS, DOMAIN from homeassistant.config_entries import SOURCE_USER from homeassistant.const import ( CONF_API_KEY, @@ -30,7 +26,6 @@ from tests.common import MockConfigEntry MOCK_USER_DATA = { CONF_NAME: "Home", CONF_API_KEY: "269534f6-xxxx-xxxx-xxxx-yyyyzzzzxxxx", - CONF_FUEL_TYPES: ["e5"], CONF_LOCATION: {CONF_LATITUDE: 51.0, CONF_LONGITUDE: 13.0}, CONF_RADIUS: 2.0, } @@ -81,7 +76,6 @@ async def test_user(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"][CONF_NAME] == "Home" assert result["data"][CONF_API_KEY] == "269534f6-xxxx-xxxx-xxxx-yyyyzzzzxxxx" - assert result["data"][CONF_FUEL_TYPES] == ["e5"] assert result["data"][CONF_LOCATION] == {"latitude": 51.0, "longitude": 13.0} assert result["data"][CONF_RADIUS] == 2.0 assert result["data"][CONF_STATIONS] == [ diff --git a/tests/components/template/conftest.py b/tests/components/template/conftest.py index bdca84ba071..86a30535e92 100644 --- a/tests/components/template/conftest.py +++ b/tests/components/template/conftest.py @@ -1,5 +1,7 @@ """template conftest.""" +from enum import Enum + import pytest from homeassistant.core import HomeAssistant, ServiceCall @@ -9,6 +11,13 @@ from homeassistant.setup import async_setup_component from tests.common import assert_setup_component, async_mock_service +class ConfigurationStyle(Enum): + """Configuration Styles for template testing.""" + + LEGACY = "Legacy" + MODERN = "Modern" + + @pytest.fixture def calls(hass: HomeAssistant) -> list[ServiceCall]: """Track calls to a mock service.""" diff --git a/tests/components/template/snapshots/test_switch.ambr b/tests/components/template/snapshots/test_switch.ambr index 
c240a9436a0..909110fdbc8 100644 --- a/tests/components/template/snapshots/test_switch.ambr +++ b/tests/components/template/snapshots/test_switch.ambr @@ -1,5 +1,18 @@ # serializer version: 1 -# name: test_setup_config_entry +# name: test_setup_config_entry[state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'My template', + }), + 'context': , + 'entity_id': 'switch.my_template', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_setup_config_entry[value_template] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'My template', diff --git a/tests/components/template/test_config_flow.py b/tests/components/template/test_config_flow.py index 2c9b81e7c91..21d740b165b 100644 --- a/tests/components/template/test_config_flow.py +++ b/tests/components/template/test_config_flow.py @@ -16,6 +16,36 @@ from homeassistant.helpers import device_registry as dr from tests.common import MockConfigEntry from tests.typing import WebSocketGenerator +SWITCH_BEFORE_OPTIONS = { + "name": "test_template_switch", + "template_type": "switch", + "turn_off": [{"event": "test_template_switch", "event_data": {"event": "off"}}], + "turn_on": [{"event": "test_template_switch", "event_data": {"event": "on"}}], + "value_template": "{{ now().minute % 2 == 0 }}", +} + + +SWITCH_AFTER_OPTIONS = { + "name": "test_template_switch", + "template_type": "switch", + "turn_off": [{"event": "test_template_switch", "event_data": {"event": "off"}}], + "turn_on": [{"event": "test_template_switch", "event_data": {"event": "on"}}], + "state": "{{ now().minute % 2 == 0 }}", + "value_template": "{{ now().minute % 2 == 0 }}", +} + +SENSOR_OPTIONS = { + "name": "test_template_sensor", + "template_type": "sensor", + "state": "{{ 'a' if now().minute % 2 == 0 else 'b' }}", +} + +BINARY_SENSOR_OPTIONS = { + "name": "test_template_sensor", + "template_type": "binary_sensor", + "state": "{{ now().minute % 2 == 0 else }}", +} + @pytest.mark.parametrize( ( diff --git a/tests/components/template/test_light.py b/tests/components/template/test_light.py index a94ec233f81..1a739b4921e 100644 --- a/tests/components/template/test_light.py +++ b/tests/components/template/test_light.py @@ -4,7 +4,7 @@ from typing import Any import pytest -from homeassistant.components import light +from homeassistant.components import light, template from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_TEMP_KELVIN, @@ -17,6 +17,7 @@ from homeassistant.components.light import ( ColorMode, LightEntityFeature, ) +from homeassistant.components.template.light import rewrite_legacy_to_modern_conf from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, @@ -26,8 +27,12 @@ from homeassistant.const import ( STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.template import Template from homeassistant.setup import async_setup_component +from .conftest import ConfigurationStyle + from tests.common import assert_setup_component # Represent for light's availability @@ -154,10 +159,245 @@ OPTIMISTIC_RGBWW_COLOR_LIGHT_CONFIG = { } -async def async_setup_light( +TEST_MISSING_KEY_CONFIG = { + "turn_on": { + "service": "light.turn_on", + "entity_id": "light.test_state", + }, + "set_level": { + "service": "light.turn_on", + "data_template": { + "entity_id": "light.test_state", + "brightness": "{{brightness}}", + }, + }, +} + + +TEST_ON_ACTION_WITH_TRANSITION_CONFIG = 
{ + "turn_on": { + "service": "test.automation", + "data_template": { + "transition": "{{transition}}", + }, + }, + "turn_off": { + "service": "light.turn_off", + "entity_id": "light.test_state", + }, + "set_level": { + "service": "light.turn_on", + "data_template": { + "entity_id": "light.test_state", + "brightness": "{{brightness}}", + "transition": "{{transition}}", + }, + }, +} + + +TEST_OFF_ACTION_WITH_TRANSITION_CONFIG = { + "turn_on": { + "service": "light.turn_on", + "entity_id": "light.test_state", + }, + "turn_off": { + "service": "test.automation", + "data_template": { + "transition": "{{transition}}", + }, + }, + "set_level": { + "service": "light.turn_on", + "data_template": { + "entity_id": "light.test_state", + "brightness": "{{brightness}}", + "transition": "{{transition}}", + }, + }, +} + + +TEST_ALL_COLORS_NO_TEMPLATE_CONFIG = { + "set_hs": { + "service": "test.automation", + "data_template": { + "entity_id": "test.test_state", + "h": "{{h}}", + "s": "{{s}}", + }, + }, + "set_temperature": { + "service": "test.automation", + "data_template": { + "entity_id": "test.test_state", + "color_temp": "{{color_temp}}", + }, + }, + "set_rgb": { + "service": "test.automation", + "data_template": { + "entity_id": "test.test_state", + "r": "{{r}}", + "g": "{{g}}", + "b": "{{b}}", + }, + }, + "set_rgbw": { + "service": "test.automation", + "data_template": { + "entity_id": "test.test_state", + "r": "{{r}}", + "g": "{{g}}", + "b": "{{b}}", + "w": "{{w}}", + }, + }, + "set_rgbww": { + "service": "test.automation", + "data_template": { + "entity_id": "test.test_state", + "r": "{{r}}", + "g": "{{g}}", + "b": "{{b}}", + "cw": "{{cw}}", + "ww": "{{ww}}", + }, + }, +} + + +TEST_UNIQUE_ID_CONFIG = { + **OPTIMISTIC_ON_OFF_LIGHT_CONFIG, + "unique_id": "not-so-unique-anymore", +} + + +@pytest.mark.parametrize( + ("old_attr", "new_attr", "attr_template"), + [ + ( + "value_template", + "state", + "{{ 1 == 1 }}", + ), + ( + "rgb_template", + "rgb", + "{{ (255,255,255) }}", + ), + ( + "rgbw_template", + "rgbw", + "{{ (255,255,255,255) }}", + ), + ( + "rgbww_template", + "rgbww", + "{{ (255,255,255,255,255) }}", + ), + ( + "effect_list_template", + "effect_list", + "{{ ['a', 'b'] }}", + ), + ( + "effect_template", + "effect", + "{{ 'a' }}", + ), + ( + "level_template", + "level", + "{{ 255 }}", + ), + ( + "max_mireds_template", + "max_mireds", + "{{ 255 }}", + ), + ( + "min_mireds_template", + "min_mireds", + "{{ 255 }}", + ), + ( + "supports_transition_template", + "supports_transition", + "{{ True }}", + ), + ( + "temperature_template", + "temperature", + "{{ 255 }}", + ), + ( + "white_value_template", + "white_value", + "{{ 255 }}", + ), + ( + "hs_template", + "hs", + "{{ (255, 255) }}", + ), + ( + "color_template", + "hs", + "{{ (255, 255) }}", + ), + ], +) +async def test_legacy_to_modern_config( + hass: HomeAssistant, old_attr: str, new_attr: str, attr_template: str +) -> None: + """Test the conversion of legacy template to modern template.""" + config = { + "foo": { + "friendly_name": "foo bar", + "unique_id": "foo-bar-light", + "icon_template": "{{ 'mdi.abc' }}", + "entity_picture_template": "{{ 'mypicture.jpg' }}", + "availability_template": "{{ 1 == 1 }}", + old_attr: attr_template, + **OPTIMISTIC_ON_OFF_LIGHT_CONFIG, + } + } + altered_configs = rewrite_legacy_to_modern_conf(hass, config) + + assert len(altered_configs) == 1 + + assert [ + { + "availability": Template("{{ 1 == 1 }}", hass), + "icon": Template("{{ 'mdi.abc' }}", hass), + "name": Template("foo bar", hass), + "object_id": 
"foo", + "picture": Template("{{ 'mypicture.jpg' }}", hass), + "turn_off": { + "data_template": { + "action": "turn_off", + "caller": "{{ this.entity_id }}", + }, + "service": "test.automation", + }, + "turn_on": { + "data_template": { + "action": "turn_on", + "caller": "{{ this.entity_id }}", + }, + "service": "test.automation", + }, + "unique_id": "foo-bar-light", + new_attr: Template(attr_template, hass), + } + ] == altered_configs + + +async def async_setup_legacy_format( hass: HomeAssistant, count: int, light_config: dict[str, Any] ) -> None: - """Do setup of light integration.""" + """Do setup of light integration via legacy format.""" config = {"light": {"platform": "template", "lights": light_config}} with assert_setup_component(count, light.DOMAIN): @@ -172,12 +412,291 @@ async def async_setup_light( await hass.async_block_till_done() -@pytest.fixture -async def setup_light( +async def async_setup_legacy_format_with_attribute( + hass: HomeAssistant, + count: int, + attribute: str, + attribute_template: str, + extra_config: dict, +) -> None: + """Do setup of a legacy light that has a single templated attribute.""" + extra = {attribute: attribute_template} if attribute and attribute_template else {} + await async_setup_legacy_format( + hass, + count, + { + "test_template_light": { + **extra_config, + "value_template": "{{ 1 == 1 }}", + **extra, + } + }, + ) + + +async def async_setup_new_format( hass: HomeAssistant, count: int, light_config: dict[str, Any] +) -> None: + """Do setup of light integration via new format.""" + config = {"template": {"light": light_config}} + + with assert_setup_component(count, template.DOMAIN): + assert await async_setup_component( + hass, + template.DOMAIN, + config, + ) + + await hass.async_block_till_done() + await hass.async_start() + await hass.async_block_till_done() + + +async def async_setup_modern_format_with_attribute( + hass: HomeAssistant, + count: int, + attribute: str, + attribute_template: str, + extra_config: dict, +) -> None: + """Do setup of a legacy light that has a single templated attribute.""" + extra = {attribute: attribute_template} if attribute and attribute_template else {} + await async_setup_new_format( + hass, + count, + { + "name": "test_template_light", + **extra_config, + "state": "{{ 1 == 1 }}", + **extra, + }, + ) + + +@pytest.fixture +async def setup_light( + hass: HomeAssistant, + count: int, + style: ConfigurationStyle, + light_config: dict[str, Any], ) -> None: """Do setup of light integration.""" - await async_setup_light(hass, count, light_config) + if style == ConfigurationStyle.LEGACY: + await async_setup_legacy_format(hass, count, light_config) + elif style == ConfigurationStyle.MODERN: + await async_setup_new_format(hass, count, light_config) + + +@pytest.fixture +async def setup_state_light( + hass: HomeAssistant, + count: int, + style: ConfigurationStyle, + state_template: str, +): + """Do setup of light integration.""" + if style == ConfigurationStyle.LEGACY: + await async_setup_legacy_format( + hass, + count, + { + "test_template_light": { + **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, + "value_template": state_template, + } + }, + ) + elif style == ConfigurationStyle.MODERN: + await async_setup_new_format( + hass, + count, + { + **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, + "name": "test_template_light", + "state": state_template, + }, + ) + + +@pytest.fixture +async def setup_single_attribute_light( + hass: HomeAssistant, + count: int, + style: ConfigurationStyle, + attribute: str, + attribute_template: str, + 
extra_config: dict, +) -> None: + """Do setup of light integration.""" + if style == ConfigurationStyle.LEGACY: + await async_setup_legacy_format_with_attribute( + hass, count, attribute, attribute_template, extra_config + ) + elif style == ConfigurationStyle.MODERN: + await async_setup_modern_format_with_attribute( + hass, count, attribute, attribute_template, extra_config + ) + + +@pytest.fixture +async def setup_single_action_light( + hass: HomeAssistant, + count: int, + style: ConfigurationStyle, + extra_config: dict, +) -> None: + """Do setup of light integration.""" + if style == ConfigurationStyle.LEGACY: + await async_setup_legacy_format_with_attribute( + hass, count, "", "", extra_config + ) + elif style == ConfigurationStyle.MODERN: + await async_setup_modern_format_with_attribute( + hass, count, "", "", extra_config + ) + + +@pytest.fixture +async def setup_light_with_effects( + hass: HomeAssistant, + count: int, + style: ConfigurationStyle, + effect_list_template: str, + effect_template: str, +) -> None: + """Do setup of light with effects.""" + common = { + "set_effect": { + "service": "test.automation", + "data_template": { + "action": "set_effect", + "caller": "{{ this.entity_id }}", + "entity_id": "test.test_state", + "effect": "{{effect}}", + }, + }, + } + if style == ConfigurationStyle.LEGACY: + await async_setup_legacy_format( + hass, + count, + { + "test_template_light": { + **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, + "value_template": "{{true}}", + **common, + "effect_list_template": effect_list_template, + "effect_template": effect_template, + } + }, + ) + elif style == ConfigurationStyle.MODERN: + await async_setup_new_format( + hass, + count, + { + "name": "test_template_light", + **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, + "state": "{{true}}", + **common, + "effect_list": effect_list_template, + "effect": effect_template, + }, + ) + + +@pytest.fixture +async def setup_light_with_mireds( + hass: HomeAssistant, + count: int, + style: ConfigurationStyle, + attribute: str, + attribute_template: str, +) -> None: + """Do setup of light that uses mireds.""" + common = { + "set_temperature": { + "service": "light.turn_on", + "data_template": { + "entity_id": "light.test_state", + "color_temp": "{{color_temp}}", + }, + }, + attribute: attribute_template, + } + if style == ConfigurationStyle.LEGACY: + await async_setup_legacy_format( + hass, + count, + { + "test_template_light": { + **OPTIMISTIC_ON_OFF_LIGHT_CONFIG, + "value_template": "{{ 1 == 1 }}", + **common, + "temperature_template": "{{200}}", + } + }, + ) + elif style == ConfigurationStyle.MODERN: + await async_setup_new_format( + hass, + count, + { + "name": "test_template_light", + **OPTIMISTIC_ON_OFF_LIGHT_CONFIG, + "state": "{{ 1 == 1 }}", + **common, + "temperature": "{{200}}", + }, + ) + + +@pytest.fixture +async def setup_light_with_transition_template( + hass: HomeAssistant, + count: int, + style: ConfigurationStyle, + transition_template: str, +) -> None: + """Do setup of light that uses mireds.""" + common = { + "set_effect": { + "service": "test.automation", + "data_template": { + "entity_id": "test.test_state", + "effect": "{{effect}}", + }, + }, + } + if style == ConfigurationStyle.LEGACY: + await async_setup_legacy_format( + hass, + count, + { + "test_template_light": { + **OPTIMISTIC_COLOR_TEMP_LIGHT_CONFIG, + "value_template": "{{ 1 == 1 }}", + **common, + "effect_list_template": "{{ ['Disco', 'Police'] }}", + "effect_template": "{{ None }}", + "supports_transition_template": transition_template, + } + }, + ) 
+ elif style == ConfigurationStyle.MODERN: + await async_setup_new_format( + hass, + count, + { + "name": "test_template_light", + **OPTIMISTIC_COLOR_TEMP_LIGHT_CONFIG, + "state": "{{ 1 == 1 }}", + **common, + "effect_list": "{{ ['Disco', 'Police'] }}", + "effect": "{{ None }}", + "supports_transition": transition_template, + }, + ) @pytest.mark.parametrize("count", [1]) @@ -186,18 +705,15 @@ async def setup_light( [(0, [ColorMode.BRIGHTNESS])], ) @pytest.mark.parametrize( - "light_config", + "style", [ - { - "test_template_light": { - **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - "value_template": "{{states.test['big.fat...']}}", - } - }, + ConfigurationStyle.LEGACY, + ConfigurationStyle.MODERN, ], ) +@pytest.mark.parametrize("state_template", ["{{states.test['big.fat...']}}"]) async def test_template_state_invalid( - hass: HomeAssistant, supported_features, supported_color_modes, setup_light + hass: HomeAssistant, supported_features, supported_color_modes, setup_state_light ) -> None: """Test template state with render error.""" state = hass.states.get("light.test_template_light") @@ -209,17 +725,14 @@ async def test_template_state_invalid( @pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + "style", [ - { - "test_template_light": { - **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - "value_template": "{{ states.light.test_state.state }}", - } - }, + ConfigurationStyle.LEGACY, + ConfigurationStyle.MODERN, ], ) -async def test_template_state_text(hass: HomeAssistant, setup_light) -> None: +@pytest.mark.parametrize("state_template", ["{{ states.light.test_state.state }}"]) +async def test_template_state_text(hass: HomeAssistant, setup_state_light) -> None: """Test the state text of a template.""" set_state = STATE_ON hass.states.async_set("light.test_state", set_state) @@ -242,7 +755,14 @@ async def test_template_state_text(hass: HomeAssistant, setup_light) -> None: @pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - ("value_template", "expected_state", "expected_color_mode"), + "style", + [ + ConfigurationStyle.LEGACY, + ConfigurationStyle.MODERN, + ], +) +@pytest.mark.parametrize( + ("state_template", "expected_state", "expected_color_mode"), [ ( "{{ 1 == 1 }}", @@ -256,21 +776,13 @@ async def test_template_state_text(hass: HomeAssistant, setup_light) -> None: ), ], ) -async def test_templatex_state_boolean( +async def test_legacy_template_state_boolean( hass: HomeAssistant, expected_color_mode, expected_state, - count, - value_template, + setup_state_light, ) -> None: """Test the setting of the state with boolean on.""" - light_config = { - "test_template_light": { - **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - "value_template": value_template, - } - } - await async_setup_light(hass, count, light_config) state = hass.states.get("light.test_template_light") assert state.state == expected_state assert state.attributes.get("color_mode") == expected_color_mode @@ -280,48 +792,56 @@ async def test_templatex_state_boolean( @pytest.mark.parametrize("count", [0]) @pytest.mark.parametrize( - "light_config", + ("light_config", "style"), [ - { - "test_template_light": { + ( + { + "test_template_light": { + **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, + "value_template": "{%- if false -%}", + } + }, + ConfigurationStyle.LEGACY, + ), + ( + { + "bad name here": { + **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, + "value_template": "{{ 1== 1}}", + } + }, + ConfigurationStyle.LEGACY, + ), + ( + {"test_template_light": "Invalid"}, + ConfigurationStyle.LEGACY, + ), + ( + { 
**OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - "value_template": "{%- if false -%}", - } - }, - { - "bad name here": { - **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - "value_template": "{{ 1== 1}}", - } - }, - {"test_template_light": "Invalid"}, + "name": "test_template_light", + "state": "{%- if false -%}", + }, + ConfigurationStyle.MODERN, + ), ], ) -async def test_template_syntax_error(hass: HomeAssistant, setup_light) -> None: - """Test templating syntax error.""" +async def test_template_config_errors(hass: HomeAssistant, setup_light) -> None: + """Test template light configuration errors.""" assert hass.states.async_all("light") == [] @pytest.mark.parametrize( - ("light_config", "count"), + ("light_config", "style", "count"), [ ( - { - "light_one": { - "value_template": "{{ 1== 1}}", - "turn_on": { - "service": "light.turn_on", - "entity_id": "light.test_state", - }, - "set_level": { - "service": "light.turn_on", - "data_template": { - "entity_id": "light.test_state", - "brightness": "{{brightness}}", - }, - }, - } - }, + {"light_one": {"value_template": "{{ 1== 1}}", **TEST_MISSING_KEY_CONFIG}}, + ConfigurationStyle.LEGACY, + 0, + ), + ( + {"name": "light_one", "state": "{{ 1== 1}}", **TEST_MISSING_KEY_CONFIG}, + ConfigurationStyle.MODERN, 0, ), ], @@ -336,18 +856,15 @@ async def test_missing_key(hass: HomeAssistant, count, setup_light) -> None: @pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + "style", [ - { - "test_template_light": { - **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - "value_template": "{{states.light.test_state.state}}", - } - }, + ConfigurationStyle.LEGACY, + ConfigurationStyle.MODERN, ], ) +@pytest.mark.parametrize("state_template", ["{{ states.light.test_state.state }}"]) async def test_on_action( - hass: HomeAssistant, setup_light, calls: list[ServiceCall] + hass: HomeAssistant, setup_state_light, calls: list[ServiceCall] ) -> None: """Test on action.""" hass.states.async_set("light.test_state", STATE_OFF) @@ -378,32 +895,26 @@ async def test_on_action( @pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + ("light_config", "style"), [ - { - "test_template_light": { - "value_template": "{{states.light.test_state.state}}", - "turn_on": { - "service": "test.automation", - "data_template": { - "transition": "{{transition}}", - }, - }, - "turn_off": { - "service": "light.turn_off", - "entity_id": "light.test_state", - }, - "supports_transition_template": "{{true}}", - "set_level": { - "service": "light.turn_on", - "data_template": { - "entity_id": "light.test_state", - "brightness": "{{brightness}}", - "transition": "{{transition}}", - }, - }, - } - }, + ( + { + "test_template_light": { + "value_template": "{{states.light.test_state.state}}", + **TEST_ON_ACTION_WITH_TRANSITION_CONFIG, + "supports_transition_template": "{{true}}", + } + }, + ConfigurationStyle.LEGACY, + ), + ( + { + "name": "test_template_light", + **TEST_ON_ACTION_WITH_TRANSITION_CONFIG, + "supports_transition": "{{true}}", + }, + ConfigurationStyle.MODERN, + ), ], ) async def test_on_action_with_transition( @@ -437,13 +948,23 @@ async def test_on_action_with_transition( @pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + ("light_config", "style"), [ - { - "test_template_light": { + ( + { + "test_template_light": { + **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, + } + }, + ConfigurationStyle.LEGACY, + ), + ( + { + "name": "test_template_light", **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - } - }, + }, + ConfigurationStyle.MODERN, + ), 
], ) async def test_on_action_optimistic( @@ -497,18 +1018,15 @@ async def test_on_action_optimistic( @pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + "style", [ - { - "test_template_light": { - **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - "value_template": "{{states.light.test_state.state}}", - } - }, + ConfigurationStyle.LEGACY, + ConfigurationStyle.MODERN, ], ) +@pytest.mark.parametrize("state_template", ["{{ states.light.test_state.state }}"]) async def test_off_action( - hass: HomeAssistant, setup_light, calls: list[ServiceCall] + hass: HomeAssistant, setup_state_light, calls: list[ServiceCall] ) -> None: """Test off action.""" hass.states.async_set("light.test_state", STATE_ON) @@ -538,32 +1056,27 @@ async def test_off_action( @pytest.mark.parametrize("count", [(1)]) @pytest.mark.parametrize( - "light_config", + ("light_config", "style"), [ - { - "test_template_light": { - "value_template": "{{states.light.test_state.state}}", - "turn_on": { - "service": "light.turn_on", - "entity_id": "light.test_state", - }, - "turn_off": { - "service": "test.automation", - "data_template": { - "transition": "{{transition}}", - }, - }, - "supports_transition_template": "{{true}}", - "set_level": { - "service": "light.turn_on", - "data_template": { - "entity_id": "light.test_state", - "brightness": "{{brightness}}", - "transition": "{{transition}}", - }, - }, - } - }, + ( + { + "test_template_light": { + "value_template": "{{states.light.test_state.state}}", + **TEST_OFF_ACTION_WITH_TRANSITION_CONFIG, + "supports_transition_template": "{{true}}", + } + }, + ConfigurationStyle.LEGACY, + ), + ( + { + "name": "test_template_light", + "state": "{{states.light.test_state.state}}", + **TEST_OFF_ACTION_WITH_TRANSITION_CONFIG, + "supports_transition": "{{true}}", + }, + ConfigurationStyle.MODERN, + ), ], ) async def test_off_action_with_transition( @@ -596,13 +1109,23 @@ async def test_off_action_with_transition( @pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + ("light_config", "style"), [ - { - "test_template_light": { + ( + { + "test_template_light": { + **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, + } + }, + ConfigurationStyle.LEGACY, + ), + ( + { + "name": "test_template_light", **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - } - }, + }, + ConfigurationStyle.MODERN, + ), ], ) async def test_off_action_optimistic( @@ -632,19 +1155,16 @@ async def test_off_action_optimistic( @pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + "style", [ - { - "test_template_light": { - **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - "value_template": "{{1 == 1}}", - } - }, + ConfigurationStyle.LEGACY, + ConfigurationStyle.MODERN, ], ) +@pytest.mark.parametrize("state_template", ["{{1 == 1}}"]) async def test_level_action_no_template( hass: HomeAssistant, - setup_light, + setup_state_light, calls: list[ServiceCall], ) -> None: """Test setting brightness with optimistic template.""" @@ -671,9 +1191,18 @@ async def test_level_action_no_template( assert state.attributes["supported_features"] == 0 -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - ("expected_level", "level_template", "expected_color_mode"), + ("count", "extra_config"), [(1, OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG)] +) +@pytest.mark.parametrize( + ("style", "attribute"), + [ + (ConfigurationStyle.LEGACY, "level_template"), + (ConfigurationStyle.MODERN, "level"), + ], +) +@pytest.mark.parametrize( + ("expected_level", "attribute_template", "expected_color_mode"), [ (255, 
"{{255}}", ColorMode.BRIGHTNESS), (None, "{{256}}", ColorMode.BRIGHTNESS), @@ -690,20 +1219,11 @@ async def test_level_action_no_template( ) async def test_level_template( hass: HomeAssistant, - expected_level, - expected_color_mode, - count, - level_template, + expected_level: Any, + expected_color_mode: ColorMode, + setup_single_attribute_light, ) -> None: """Test the template for the level.""" - light_config = { - "test_template_light": { - **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - "value_template": "{{ 1 == 1 }}", - "level_template": level_template, - } - } - await async_setup_light(hass, count, light_config) state = hass.states.get("light.test_template_light") assert state.attributes.get("brightness") == expected_level assert state.state == STATE_ON @@ -712,9 +1232,18 @@ async def test_level_template( assert state.attributes["supported_features"] == 0 -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - ("expected_temp", "temperature_template", "expected_color_mode"), + ("count", "extra_config"), [(1, OPTIMISTIC_COLOR_TEMP_LIGHT_CONFIG)] +) +@pytest.mark.parametrize( + ("style", "attribute"), + [ + (ConfigurationStyle.LEGACY, "temperature_template"), + (ConfigurationStyle.MODERN, "temperature"), + ], +) +@pytest.mark.parametrize( + ("expected_temp", "attribute_template", "expected_color_mode"), [ (500, "{{500}}", ColorMode.COLOR_TEMP), (None, "{{501}}", ColorMode.COLOR_TEMP), @@ -727,20 +1256,11 @@ async def test_level_template( ) async def test_temperature_template( hass: HomeAssistant, - expected_temp, - expected_color_mode, - count, - temperature_template, + expected_temp: Any, + expected_color_mode: ColorMode, + setup_single_attribute_light, ) -> None: """Test the template for the temperature.""" - light_config = { - "test_template_light": { - **OPTIMISTIC_COLOR_TEMP_LIGHT_CONFIG, - "value_template": "{{ 1 == 1 }}", - "temperature_template": temperature_template, - } - } - await async_setup_light(hass, count, light_config) state = hass.states.get("light.test_template_light") assert state.attributes.get("color_temp") == expected_temp assert state.state == STATE_ON @@ -749,21 +1269,19 @@ async def test_temperature_template( assert state.attributes["supported_features"] == 0 -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + ("count", "extra_config"), [(1, OPTIMISTIC_COLOR_TEMP_LIGHT_CONFIG)] +) +@pytest.mark.parametrize( + "style", [ - { - "test_template_light": { - **OPTIMISTIC_COLOR_TEMP_LIGHT_CONFIG, - "value_template": "{{1 == 1}}", - } - }, + ConfigurationStyle.LEGACY, + ConfigurationStyle.MODERN, ], ) async def test_temperature_action_no_template( hass: HomeAssistant, - setup_light, + setup_single_action_light, calls: list[ServiceCall], ) -> None: """Test setting temperature with optimistic template.""" @@ -793,43 +1311,53 @@ async def test_temperature_action_no_template( @pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + ("light_config", "style", "entity_id"), [ - { - "test_template_light": { + ( + { + "test_template_light": { + **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, + "friendly_name": "Template light", + "value_template": "{{ 1 == 1 }}", + } + }, + ConfigurationStyle.LEGACY, + "light.test_template_light", + ), + ( + { **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - "friendly_name": "Template light", - "value_template": "{{ 1 == 1 }}", - } - }, + "name": "Template light", + "state": "{{ 1 == 1 }}", + }, + ConfigurationStyle.MODERN, + "light.template_light", + ), ], ) -async def test_friendly_name(hass: 
HomeAssistant, setup_light) -> None: +async def test_friendly_name(hass: HomeAssistant, entity_id: str, setup_light) -> None: """Test the accessibility of the friendly_name attribute.""" - state = hass.states.get("light.test_template_light") + state = hass.states.get(entity_id) assert state is not None assert state.attributes.get("friendly_name") == "Template light" -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + ("count", "extra_config"), [(1, OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG)] +) +@pytest.mark.parametrize( + ("style", "attribute"), [ - { - "test_template_light": { - **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - "friendly_name": "Template light", - "value_template": "{{ 1 == 1 }}", - "icon_template": ( - "{% if states.light.test_state.state %}mdi:check{% endif %}" - ), - } - }, + (ConfigurationStyle.LEGACY, "icon_template"), + (ConfigurationStyle.MODERN, "icon"), ], ) -async def test_icon_template(hass: HomeAssistant, setup_light) -> None: +@pytest.mark.parametrize( + "attribute_template", ["{% if states.light.test_state.state %}mdi:check{% endif %}"] +) +async def test_icon_template(hass: HomeAssistant, setup_single_attribute_light) -> None: """Test icon template.""" state = hass.states.get("light.test_template_light") assert state.attributes.get("icon") == "" @@ -842,23 +1370,23 @@ async def test_icon_template(hass: HomeAssistant, setup_light) -> None: assert state.attributes["icon"] == "mdi:check" -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + ("count", "extra_config"), [(1, OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG)] +) +@pytest.mark.parametrize( + ("style", "attribute"), [ - { - "test_template_light": { - **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - "friendly_name": "Template light", - "value_template": "{{ 1 == 1 }}", - "entity_picture_template": ( - "{% if states.light.test_state.state %}/local/light.png{% endif %}" - ), - } - }, + (ConfigurationStyle.LEGACY, "entity_picture_template"), + (ConfigurationStyle.MODERN, "picture"), ], ) -async def test_entity_picture_template(hass: HomeAssistant, setup_light) -> None: +@pytest.mark.parametrize( + "attribute_template", + ["{% if states.light.test_state.state %}/local/light.png{% endif %}"], +) +async def test_entity_picture_template( + hass: HomeAssistant, setup_single_attribute_light +) -> None: """Test entity_picture template.""" state = hass.states.get("light.test_template_light") assert state.attributes.get("entity_picture") == "" @@ -871,21 +1399,21 @@ async def test_entity_picture_template(hass: HomeAssistant, setup_light) -> None assert state.attributes["entity_picture"] == "/local/light.png" -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + ("count", "extra_config"), [ - { - "test_template_light": { - **OPTIMISTIC_LEGACY_COLOR_LIGHT_CONFIG, - "value_template": "{{1 == 1}}", - } - }, + (1, OPTIMISTIC_LEGACY_COLOR_LIGHT_CONFIG), + ], +) +@pytest.mark.parametrize( + "style", + [ + ConfigurationStyle.LEGACY, ], ) async def test_legacy_color_action_no_template( hass: HomeAssistant, - setup_light, + setup_single_action_light, calls: list[ServiceCall], ) -> None: """Test setting color with optimistic template.""" @@ -913,24 +1441,25 @@ async def test_legacy_color_action_no_template( assert state.attributes["supported_features"] == 0 -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + ("count", "extra_config"), [ - { - "test_template_light": { - **OPTIMISTIC_HS_COLOR_LIGHT_CONFIG, - "value_template": "{{1 == 
1}}", - } - }, + (1, OPTIMISTIC_HS_COLOR_LIGHT_CONFIG), + ], +) +@pytest.mark.parametrize( + "style", + [ + ConfigurationStyle.LEGACY, + ConfigurationStyle.MODERN, ], ) async def test_hs_color_action_no_template( hass: HomeAssistant, - setup_light, + setup_single_action_light, calls: list[ServiceCall], ) -> None: - """Test setting hs color with optimistic template.""" + """Test setting color with optimistic template.""" state = hass.states.get("light.test_template_light") assert state.attributes.get("hs_color") is None @@ -955,21 +1484,20 @@ async def test_hs_color_action_no_template( assert state.attributes["supported_features"] == 0 -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + ("count", "extra_config"), + [(1, OPTIMISTIC_RGB_COLOR_LIGHT_CONFIG)], +) +@pytest.mark.parametrize( + "style", [ - { - "test_template_light": { - **OPTIMISTIC_RGB_COLOR_LIGHT_CONFIG, - "value_template": "{{1 == 1}}", - } - }, + ConfigurationStyle.LEGACY, + ConfigurationStyle.MODERN, ], ) async def test_rgb_color_action_no_template( hass: HomeAssistant, - setup_light, + setup_single_action_light, calls: list[ServiceCall], ) -> None: """Test setting rgb color with optimistic template.""" @@ -998,21 +1526,20 @@ async def test_rgb_color_action_no_template( assert state.attributes["supported_features"] == 0 -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + ("count", "extra_config"), + [(1, OPTIMISTIC_RGBW_COLOR_LIGHT_CONFIG)], +) +@pytest.mark.parametrize( + "style", [ - { - "test_template_light": { - **OPTIMISTIC_RGBW_COLOR_LIGHT_CONFIG, - "value_template": "{{1 == 1}}", - } - }, + ConfigurationStyle.LEGACY, + ConfigurationStyle.MODERN, ], ) async def test_rgbw_color_action_no_template( hass: HomeAssistant, - setup_light, + setup_single_action_light, calls: list[ServiceCall], ) -> None: """Test setting rgbw color with optimistic template.""" @@ -1045,21 +1572,20 @@ async def test_rgbw_color_action_no_template( assert state.attributes["supported_features"] == 0 -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + ("count", "extra_config"), + [(1, OPTIMISTIC_RGBWW_COLOR_LIGHT_CONFIG)], +) +@pytest.mark.parametrize( + "style", [ - { - "test_template_light": { - **OPTIMISTIC_RGBWW_COLOR_LIGHT_CONFIG, - "value_template": "{{1 == 1}}", - } - }, + ConfigurationStyle.LEGACY, + ConfigurationStyle.MODERN, ], ) async def test_rgbww_color_action_no_template( hass: HomeAssistant, - setup_light, + setup_single_action_light, calls: list[ServiceCall], ) -> None: """Test setting rgbww color with optimistic template.""" @@ -1123,7 +1649,7 @@ async def test_legacy_color_template( "color_template": color_template, } } - await async_setup_light(hass, count, light_config) + await async_setup_legacy_format(hass, count, light_config) state = hass.states.get("light.test_template_light") assert state.attributes.get("hs_color") == expected_hs assert state.state == STATE_ON @@ -1132,9 +1658,18 @@ async def test_legacy_color_template( assert state.attributes["supported_features"] == 0 -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - ("expected_hs", "hs_template", "expected_color_mode"), + ("count", "extra_config"), [(1, OPTIMISTIC_HS_COLOR_LIGHT_CONFIG)] +) +@pytest.mark.parametrize( + ("style", "attribute"), + [ + (ConfigurationStyle.LEGACY, "hs_template"), + (ConfigurationStyle.MODERN, "hs"), + ], +) +@pytest.mark.parametrize( + ("expected_hs", "attribute_template", "expected_color_mode"), [ ((360, 100), "{{(360, 
100)}}", ColorMode.HS), ((360, 100), "(360, 100)", ColorMode.HS), @@ -1152,18 +1687,9 @@ async def test_hs_template( hass: HomeAssistant, expected_hs, expected_color_mode, - count, - hs_template, + setup_single_attribute_light, ) -> None: """Test the template for the color.""" - light_config = { - "test_template_light": { - **OPTIMISTIC_HS_COLOR_LIGHT_CONFIG, - "value_template": "{{ 1 == 1 }}", - "hs_template": hs_template, - } - } - await async_setup_light(hass, count, light_config) state = hass.states.get("light.test_template_light") assert state.attributes.get("hs_color") == expected_hs assert state.state == STATE_ON @@ -1172,9 +1698,18 @@ async def test_hs_template( assert state.attributes["supported_features"] == 0 -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - ("expected_rgb", "rgb_template", "expected_color_mode"), + ("count", "extra_config"), [(1, OPTIMISTIC_RGB_COLOR_LIGHT_CONFIG)] +) +@pytest.mark.parametrize( + ("style", "attribute"), + [ + (ConfigurationStyle.LEGACY, "rgb_template"), + (ConfigurationStyle.MODERN, "rgb"), + ], +) +@pytest.mark.parametrize( + ("expected_rgb", "attribute_template", "expected_color_mode"), [ ((160, 78, 192), "{{(160, 78, 192)}}", ColorMode.RGB), ((160, 78, 192), "{{[160, 78, 192]}}", ColorMode.RGB), @@ -1193,18 +1728,9 @@ async def test_rgb_template( hass: HomeAssistant, expected_rgb, expected_color_mode, - count, - rgb_template, + setup_single_attribute_light, ) -> None: """Test the template for the color.""" - light_config = { - "test_template_light": { - **OPTIMISTIC_RGB_COLOR_LIGHT_CONFIG, - "value_template": "{{ 1 == 1 }}", - "rgb_template": rgb_template, - } - } - await async_setup_light(hass, count, light_config) state = hass.states.get("light.test_template_light") assert state.attributes.get("rgb_color") == expected_rgb assert state.state == STATE_ON @@ -1213,9 +1739,18 @@ async def test_rgb_template( assert state.attributes["supported_features"] == 0 -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - ("expected_rgbw", "rgbw_template", "expected_color_mode"), + ("count", "extra_config"), [(1, OPTIMISTIC_RGBW_COLOR_LIGHT_CONFIG)] +) +@pytest.mark.parametrize( + ("style", "attribute"), + [ + (ConfigurationStyle.LEGACY, "rgbw_template"), + (ConfigurationStyle.MODERN, "rgbw"), + ], +) +@pytest.mark.parametrize( + ("expected_rgbw", "attribute_template", "expected_color_mode"), [ ((160, 78, 192, 25), "{{(160, 78, 192, 25)}}", ColorMode.RGBW), ((160, 78, 192, 25), "{{[160, 78, 192, 25]}}", ColorMode.RGBW), @@ -1235,18 +1770,9 @@ async def test_rgbw_template( hass: HomeAssistant, expected_rgbw, expected_color_mode, - count, - rgbw_template, + setup_single_attribute_light, ) -> None: """Test the template for the color.""" - light_config = { - "test_template_light": { - **OPTIMISTIC_RGBW_COLOR_LIGHT_CONFIG, - "value_template": "{{ 1 == 1 }}", - "rgbw_template": rgbw_template, - } - } - await async_setup_light(hass, count, light_config) state = hass.states.get("light.test_template_light") assert state.attributes.get("rgbw_color") == expected_rgbw assert state.state == STATE_ON @@ -1255,9 +1781,18 @@ async def test_rgbw_template( assert state.attributes["supported_features"] == 0 -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - ("expected_rgbww", "rgbww_template", "expected_color_mode"), + ("count", "extra_config"), [(1, OPTIMISTIC_RGBWW_COLOR_LIGHT_CONFIG)] +) +@pytest.mark.parametrize( + ("style", "attribute"), + [ + (ConfigurationStyle.LEGACY, "rgbww_template"), + (ConfigurationStyle.MODERN, 
"rgbww"), + ], +) +@pytest.mark.parametrize( + ("expected_rgbww", "attribute_template", "expected_color_mode"), [ ((160, 78, 192, 25, 55), "{{(160, 78, 192, 25, 55)}}", ColorMode.RGBWW), ((160, 78, 192, 25, 55), "(160, 78, 192, 25, 55)", ColorMode.RGBWW), @@ -1282,18 +1817,9 @@ async def test_rgbww_template( hass: HomeAssistant, expected_rgbww, expected_color_mode, - count, - rgbww_template, + setup_single_attribute_light, ) -> None: """Test the template for the color.""" - light_config = { - "test_template_light": { - **OPTIMISTIC_RGBWW_COLOR_LIGHT_CONFIG, - "value_template": "{{ 1 == 1 }}", - "rgbww_template": rgbww_template, - } - } - await async_setup_light(hass, count, light_config) state = hass.states.get("light.test_template_light") assert state.attributes.get("rgbww_color") == expected_rgbww assert state.state == STATE_ON @@ -1304,59 +1830,27 @@ async def test_rgbww_template( @pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + ("light_config", "style"), [ - { - "test_template_light": { + ( + { + "test_template_light": { + **OPTIMISTIC_ON_OFF_LIGHT_CONFIG, + "value_template": "{{1 == 1}}", + **TEST_ALL_COLORS_NO_TEMPLATE_CONFIG, + } + }, + ConfigurationStyle.LEGACY, + ), + ( + { + "name": "test_template_light", **OPTIMISTIC_ON_OFF_LIGHT_CONFIG, - "value_template": "{{1 == 1}}", - "set_hs": { - "service": "test.automation", - "data_template": { - "entity_id": "test.test_state", - "h": "{{h}}", - "s": "{{s}}", - }, - }, - "set_temperature": { - "service": "test.automation", - "data_template": { - "entity_id": "test.test_state", - "color_temp": "{{color_temp}}", - }, - }, - "set_rgb": { - "service": "test.automation", - "data_template": { - "entity_id": "test.test_state", - "r": "{{r}}", - "g": "{{g}}", - "b": "{{b}}", - }, - }, - "set_rgbw": { - "service": "test.automation", - "data_template": { - "entity_id": "test.test_state", - "r": "{{r}}", - "g": "{{g}}", - "b": "{{b}}", - "w": "{{w}}", - }, - }, - "set_rgbww": { - "service": "test.automation", - "data_template": { - "entity_id": "test.test_state", - "r": "{{r}}", - "g": "{{g}}", - "b": "{{b}}", - "cw": "{{cw}}", - "ww": "{{ww}}", - }, - }, - } - }, + "state": "{{1 == 1}}", + **TEST_ALL_COLORS_NO_TEMPLATE_CONFIG, + }, + ConfigurationStyle.MODERN, + ), ], ) async def test_all_colors_mode_no_template( @@ -1554,29 +2048,21 @@ async def test_all_colors_mode_no_template( @pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + "style", [ConfigurationStyle.LEGACY, ConfigurationStyle.MODERN] +) +@pytest.mark.parametrize( + ("effect_list_template", "effect_template", "effect", "expected"), [ - { - "test_template_light": { - **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - "value_template": "{{true}}", - "set_effect": { - "service": "test.automation", - "data_template": { - "action": "set_effect", - "caller": "{{ this.entity_id }}", - "entity_id": "test.test_state", - "effect": "{{effect}}", - }, - }, - "effect_list_template": "{{ ['Disco', 'Police'] }}", - "effect_template": "{{ 'Disco' }}", - } - }, + ("{{ ['Disco', 'Police'] }}", "{{ 'Disco' }}", "Disco", "Disco"), + ("{{ ['Disco', 'Police'] }}", "{{ 'None' }}", "RGB", None), ], ) -async def test_effect_action_valid_effect( - hass: HomeAssistant, setup_light, calls: list[ServiceCall] +async def test_effect_action( + hass: HomeAssistant, + effect: str, + expected: Any, + setup_light_with_effects, + calls: list[ServiceCall], ) -> None: """Test setting valid effect with template.""" state = hass.states.get("light.test_template_light") @@ 
-1585,64 +2071,24 @@ async def test_effect_action_valid_effect( await hass.services.async_call( light.DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_template_light", ATTR_EFFECT: "Disco"}, + {ATTR_ENTITY_ID: "light.test_template_light", ATTR_EFFECT: effect}, blocking=True, ) assert len(calls) == 1 assert calls[-1].data["action"] == "set_effect" assert calls[-1].data["caller"] == "light.test_template_light" - assert calls[-1].data["effect"] == "Disco" + assert calls[-1].data["effect"] == effect state = hass.states.get("light.test_template_light") assert state is not None - assert state.attributes.get("effect") == "Disco" + assert state.attributes.get("effect") == expected -@pytest.mark.parametrize("count", [1]) +@pytest.mark.parametrize(("count", "effect_template"), [(1, "{{ None }}")]) @pytest.mark.parametrize( - "light_config", - [ - { - "test_template_light": { - **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - "value_template": "{{true}}", - "set_effect": { - "service": "test.automation", - "data_template": { - "entity_id": "test.test_state", - "effect": "{{effect}}", - }, - }, - "effect_list_template": "{{ ['Disco', 'Police'] }}", - "effect_template": "{{ None }}", - } - }, - ], + "style", [ConfigurationStyle.LEGACY, ConfigurationStyle.MODERN] ) -async def test_effect_action_invalid_effect( - hass: HomeAssistant, setup_light, calls: list[ServiceCall] -) -> None: - """Test setting invalid effect with template.""" - state = hass.states.get("light.test_template_light") - assert state is not None - - await hass.services.async_call( - light.DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_template_light", ATTR_EFFECT: "RGB"}, - blocking=True, - ) - - assert len(calls) == 1 - assert calls[0].data["effect"] == "RGB" - - state = hass.states.get("light.test_template_light") - assert state is not None - assert state.attributes.get("effect") is None - - -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( ("expected_effect_list", "effect_list_template"), [ @@ -1663,31 +2109,21 @@ async def test_effect_action_invalid_effect( ], ) async def test_effect_list_template( - hass: HomeAssistant, expected_effect_list, count, effect_list_template + hass: HomeAssistant, expected_effect_list, setup_light_with_effects ) -> None: """Test the template for the effect list.""" - light_config = { - "test_template_light": { - **OPTIMISTIC_ON_OFF_LIGHT_CONFIG, - "value_template": "{{ 1 == 1 }}", - "set_effect": { - "service": "test.automation", - "data_template": { - "entity_id": "test.test_state", - "effect": "{{effect}}", - }, - }, - "effect_template": "{{ None }}", - "effect_list_template": effect_list_template, - } - } - await async_setup_light(hass, count, light_config) state = hass.states.get("light.test_template_light") assert state is not None assert state.attributes.get("effect_list") == expected_effect_list -@pytest.mark.parametrize("count", [1]) +@pytest.mark.parametrize( + ("count", "effect_list_template"), + [(1, "{{ ['Strobe color', 'Police', 'Christmas', 'RGB', 'Random Loop'] }}")], +) +@pytest.mark.parametrize( + "style", [ConfigurationStyle.LEGACY, ConfigurationStyle.MODERN] +) @pytest.mark.parametrize( ("expected_effect", "effect_template"), [ @@ -1699,27 +2135,9 @@ async def test_effect_list_template( ], ) async def test_effect_template( - hass: HomeAssistant, expected_effect, count, effect_template + hass: HomeAssistant, expected_effect, setup_light_with_effects ) -> None: """Test the template for the effect.""" - light_config = { - "test_template_light": { - 
**OPTIMISTIC_ON_OFF_LIGHT_CONFIG, - "value_template": "{{ 1 == 1 }}", - "set_effect": { - "service": "test.automation", - "data_template": { - "entity_id": "test.test_state", - "effect": "{{effect}}", - }, - }, - "effect_list_template": ( - "{{ ['Strobe color', 'Police', 'Christmas', 'RGB', 'Random Loop'] }}" - ), - "effect_template": effect_template, - } - } - await async_setup_light(hass, count, light_config) state = hass.states.get("light.test_template_light") assert state is not None assert state.attributes.get("effect") == expected_effect @@ -1727,7 +2145,14 @@ async def test_effect_template( @pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - ("expected_min_mireds", "min_mireds_template"), + ("style", "attribute"), + [ + (ConfigurationStyle.LEGACY, "min_mireds_template"), + (ConfigurationStyle.MODERN, "min_mireds"), + ], +) +@pytest.mark.parametrize( + ("expected_min_mireds", "attribute_template"), [ (118, "{{118}}"), (153, "{{x - 12}}"), @@ -1738,25 +2163,9 @@ async def test_effect_template( ], ) async def test_min_mireds_template( - hass: HomeAssistant, expected_min_mireds, count, min_mireds_template + hass: HomeAssistant, expected_min_mireds, setup_light_with_mireds ) -> None: """Test the template for the min mireds.""" - light_config = { - "test_template_light": { - **OPTIMISTIC_ON_OFF_LIGHT_CONFIG, - "value_template": "{{ 1 == 1 }}", - "set_temperature": { - "service": "light.turn_on", - "data_template": { - "entity_id": "light.test_state", - "color_temp": "{{color_temp}}", - }, - }, - "temperature_template": "{{200}}", - "min_mireds_template": min_mireds_template, - } - } - await async_setup_light(hass, count, light_config) state = hass.states.get("light.test_template_light") assert state is not None assert state.attributes.get("min_mireds") == expected_min_mireds @@ -1764,7 +2173,14 @@ async def test_min_mireds_template( @pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - ("expected_max_mireds", "max_mireds_template"), + ("style", "attribute"), + [ + (ConfigurationStyle.LEGACY, "max_mireds_template"), + (ConfigurationStyle.MODERN, "max_mireds"), + ], +) +@pytest.mark.parametrize( + ("expected_max_mireds", "attribute_template"), [ (488, "{{488}}"), (500, "{{x - 12}}"), @@ -1775,33 +2191,26 @@ async def test_min_mireds_template( ], ) async def test_max_mireds_template( - hass: HomeAssistant, expected_max_mireds, count, max_mireds_template + hass: HomeAssistant, expected_max_mireds, setup_light_with_mireds ) -> None: """Test the template for the max mireds.""" - light_config = { - "test_template_light": { - **OPTIMISTIC_ON_OFF_LIGHT_CONFIG, - "value_template": "{{ 1 == 1 }}", - "set_temperature": { - "service": "light.turn_on", - "data_template": { - "entity_id": "light.test_state", - "color_temp": "{{color_temp}}", - }, - }, - "temperature_template": "{{200}}", - "max_mireds_template": max_mireds_template, - } - } - await async_setup_light(hass, count, light_config) state = hass.states.get("light.test_template_light") assert state is not None assert state.attributes.get("max_mireds") == expected_max_mireds -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - ("expected_supports_transition", "supports_transition_template"), + ("count", "extra_config"), [(1, OPTIMISTIC_COLOR_TEMP_LIGHT_CONFIG)] +) +@pytest.mark.parametrize( + ("style", "attribute"), + [ + (ConfigurationStyle.LEGACY, "supports_transition_template"), + (ConfigurationStyle.MODERN, "supports_transition"), + ], +) +@pytest.mark.parametrize( + ("expected_supports_transition", 
"attribute_template"), [ (True, "{{true}}"), (True, "{{1 == 1}}"), @@ -1812,28 +2221,9 @@ async def test_max_mireds_template( ], ) async def test_supports_transition_template( - hass: HomeAssistant, - expected_supports_transition, - count, - supports_transition_template, + hass: HomeAssistant, expected_supports_transition, setup_single_attribute_light ) -> None: """Test the template for the supports transition.""" - light_config = { - "test_template_light": { - "value_template": "{{ 1 == 1 }}", - "turn_on": {"service": "light.turn_on", "entity_id": "light.test_state"}, - "turn_off": {"service": "light.turn_off", "entity_id": "light.test_state"}, - "set_temperature": { - "service": "light.turn_on", - "data_template": { - "entity_id": "light.test_state", - "color_temp": "{{color_temp}}", - }, - }, - "supports_transition_template": supports_transition_template, - } - } - await async_setup_light(hass, count, light_config) state = hass.states.get("light.test_template_light") expected_value = 1 @@ -1847,36 +2237,16 @@ async def test_supports_transition_template( ) != expected_value -@pytest.mark.parametrize("count", [1]) +@pytest.mark.parametrize( + ("count", "transition_template"), [(1, "{{ states('sensor.test') }}")] +) +@pytest.mark.parametrize( + "style", [ConfigurationStyle.LEGACY, ConfigurationStyle.MODERN] +) async def test_supports_transition_template_updates( - hass: HomeAssistant, count: int + hass: HomeAssistant, setup_light_with_transition_template ) -> None: """Test the template for the supports transition dynamically.""" - light_config = { - "test_template_light": { - "value_template": "{{ 1 == 1 }}", - "turn_on": {"service": "light.turn_on", "entity_id": "light.test_state"}, - "turn_off": {"service": "light.turn_off", "entity_id": "light.test_state"}, - "set_temperature": { - "service": "light.turn_on", - "data_template": { - "entity_id": "light.test_state", - "color_temp": "{{color_temp}}", - }, - }, - "set_effect": { - "service": "test.automation", - "data_template": { - "entity_id": "test.test_state", - "effect": "{{effect}}", - }, - }, - "effect_list_template": "{{ ['Disco', 'Police'] }}", - "effect_template": "{{ None }}", - "supports_transition_template": "{{ states('sensor.test') }}", - } - } - await async_setup_light(hass, count, light_config) state = hass.states.get("light.test_template_light") assert state is not None @@ -1901,22 +2271,25 @@ async def test_supports_transition_template_updates( assert supported_features == LightEntityFeature.EFFECT -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + ("count", "extra_config", "attribute_template"), [ - { - "test_template_light": { - **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - "availability_template": ( - "{{ is_state('availability_boolean.state', 'on') }}" - ), - } - }, + ( + 1, + OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, + "{{ is_state('availability_boolean.state', 'on') }}", + ) + ], +) +@pytest.mark.parametrize( + ("style", "attribute"), + [ + (ConfigurationStyle.LEGACY, "availability_template"), + (ConfigurationStyle.MODERN, "availability"), ], ) async def test_available_template_with_entities( - hass: HomeAssistant, setup_light + hass: HomeAssistant, setup_single_attribute_light ) -> None: """Test availability templates with values from other entities.""" # When template returns true.. 
@@ -1934,20 +2307,25 @@ async def test_available_template_with_entities( assert hass.states.get("light.test_template_light").state == STATE_UNAVAILABLE -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + ("count", "extra_config", "attribute_template"), [ - { - "test_template_light": { - **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - "availability_template": "{{ x - 12 }}", - } - }, + ( + 1, + OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, + "{{ x - 12 }}", + ) + ], +) +@pytest.mark.parametrize( + ("style", "attribute"), + [ + (ConfigurationStyle.LEGACY, "availability_template"), + (ConfigurationStyle.MODERN, "availability"), ], ) async def test_invalid_availability_template_keeps_component_available( - hass: HomeAssistant, setup_light, caplog_setup_text + hass: HomeAssistant, setup_single_attribute_light, caplog_setup_text ) -> None: """Test that an invalid availability keeps the device available.""" assert hass.states.get("light.test_template_light").state != STATE_UNAVAILABLE @@ -1956,20 +2334,73 @@ async def test_invalid_availability_template_keeps_component_available( @pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + ("light_config", "style"), [ - { - "test_template_light_01": { - **OPTIMISTIC_ON_OFF_LIGHT_CONFIG, - "unique_id": "not-so-unique-anymore", + ( + { + "test_template_light_01": TEST_UNIQUE_ID_CONFIG, + "test_template_light_02": TEST_UNIQUE_ID_CONFIG, }, - "test_template_light_02": { - **OPTIMISTIC_ON_OFF_LIGHT_CONFIG, - "unique_id": "not-so-unique-anymore", - }, - }, + ConfigurationStyle.LEGACY, + ), + ( + [ + { + "name": "test_template_light_01", + **TEST_UNIQUE_ID_CONFIG, + }, + { + "name": "test_template_light_02", + **TEST_UNIQUE_ID_CONFIG, + }, + ], + ConfigurationStyle.MODERN, + ), ], ) async def test_unique_id(hass: HomeAssistant, setup_light) -> None: """Test unique_id option only creates one light per id.""" assert len(hass.states.async_all("light")) == 1 + + +async def test_nested_unique_id( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test unique_id option creates one light per nested id.""" + + with assert_setup_component(1, template.DOMAIN): + assert await async_setup_component( + hass, + template.DOMAIN, + { + "template": { + "unique_id": "x", + "light": [ + { + "name": "test_a", + **OPTIMISTIC_ON_OFF_LIGHT_CONFIG, + "unique_id": "a", + }, + { + "name": "test_b", + **OPTIMISTIC_ON_OFF_LIGHT_CONFIG, + "unique_id": "b", + }, + ], + } + }, + ) + + await hass.async_block_till_done() + await hass.async_start() + await hass.async_block_till_done() + + assert len(hass.states.async_all("light")) == 2 + + entry = entity_registry.async_get("light.test_a") + assert entry + assert entry.unique_id == "x-a" + + entry = entity_registry.async_get("light.test_b") + assert entry + assert entry.unique_id == "x-b" diff --git a/tests/components/template/test_switch.py b/tests/components/template/test_switch.py index 2fc0f29acaf..f0dbe43b51e 100644 --- a/tests/components/template/test_switch.py +++ b/tests/components/template/test_switch.py @@ -1,11 +1,13 @@ """The tests for the Template switch platform.""" +from typing import Any + import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant import setup -from homeassistant.components import template +from homeassistant.components import switch, template from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.components.template.switch import rewrite_legacy_to_modern_conf from homeassistant.const 
import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, @@ -16,8 +18,11 @@ from homeassistant.const import ( ) from homeassistant.core import CoreState, HomeAssistant, ServiceCall, State from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers.template import Template from homeassistant.setup import async_setup_component +from .conftest import ConfigurationStyle + from tests.common import ( MockConfigEntry, assert_setup_component, @@ -25,26 +30,225 @@ from tests.common import ( mock_restore_cache, ) -OPTIMISTIC_SWITCH_CONFIG = { - "turn_on": { - "service": "test.automation", - "data_template": { - "action": "turn_on", - "caller": "{{ this.entity_id }}", - }, +TEST_OBJECT_ID = "test_template_switch" +TEST_ENTITY_ID = f"switch.{TEST_OBJECT_ID}" +TEST_STATE_ENTITY_ID = "switch.test_state" + +SWITCH_TURN_ON = { + "service": "test.automation", + "data_template": { + "action": "turn_on", + "caller": "{{ this.entity_id }}", }, - "turn_off": { - "service": "test.automation", - "data_template": { - "action": "turn_off", - "caller": "{{ this.entity_id }}", - }, +} +SWITCH_TURN_OFF = { + "service": "test.automation", + "data_template": { + "action": "turn_off", + "caller": "{{ this.entity_id }}", }, } +SWITCH_ACTIONS = { + "turn_on": SWITCH_TURN_ON, + "turn_off": SWITCH_TURN_OFF, +} +NAMED_SWITCH_ACTIONS = { + **SWITCH_ACTIONS, + "name": TEST_OBJECT_ID, +} +UNIQUE_ID_CONFIG = { + **SWITCH_ACTIONS, + "unique_id": "not-so-unique-anymore", +} +async def async_setup_legacy_format( + hass: HomeAssistant, count: int, switch_config: dict[str, Any] +) -> None: + """Do setup of switch integration via legacy format.""" + config = {"switch": {"platform": "template", "switches": switch_config}} + + with assert_setup_component(count, switch.DOMAIN): + assert await async_setup_component( + hass, + switch.DOMAIN, + config, + ) + await hass.async_block_till_done() + await hass.async_start() + await hass.async_block_till_done() + + +async def async_setup_modern_format( + hass: HomeAssistant, count: int, switch_config: dict[str, Any] +) -> None: + """Do setup of switch integration via modern format.""" + config = {"template": {"switch": switch_config}} + + with assert_setup_component(count, template.DOMAIN): + assert await async_setup_component( + hass, + template.DOMAIN, + config, + ) + + await hass.async_block_till_done() + await hass.async_start() + await hass.async_block_till_done() + + +@pytest.fixture +async def setup_switch( + hass: HomeAssistant, + count: int, + style: ConfigurationStyle, + switch_config: dict[str, Any], +) -> None: + """Do setup of switch integration.""" + if style == ConfigurationStyle.LEGACY: + await async_setup_legacy_format(hass, count, switch_config) + elif style == ConfigurationStyle.MODERN: + await async_setup_modern_format(hass, count, switch_config) + + +@pytest.fixture +async def setup_state_switch( + hass: HomeAssistant, + count: int, + style: ConfigurationStyle, + state_template: str, +): + """Do setup of switch integration using a state template.""" + if style == ConfigurationStyle.LEGACY: + await async_setup_legacy_format( + hass, + count, + { + TEST_OBJECT_ID: { + **SWITCH_ACTIONS, + "value_template": state_template, + } + }, + ) + elif style == ConfigurationStyle.MODERN: + await async_setup_modern_format( + hass, + count, + { + **NAMED_SWITCH_ACTIONS, + "state": state_template, + }, + ) + + +@pytest.fixture +async def setup_single_attribute_switch( + hass: HomeAssistant, + count: int, + style: ConfigurationStyle, + attribute: str, + 
attribute_template: str, +) -> None: + """Do setup of switch integration testing a single attribute.""" + extra = {attribute: attribute_template} if attribute and attribute_template else {} + if style == ConfigurationStyle.LEGACY: + await async_setup_legacy_format( + hass, + count, + { + TEST_OBJECT_ID: { + **SWITCH_ACTIONS, + "value_template": "{{ 1 == 1 }}", + **extra, + } + }, + ) + elif style == ConfigurationStyle.MODERN: + await async_setup_modern_format( + hass, + count, + { + **NAMED_SWITCH_ACTIONS, + "state": "{{ 1 == 1 }}", + **extra, + }, + ) + + +@pytest.fixture +async def setup_optimistic_switch( + hass: HomeAssistant, + count: int, + style: ConfigurationStyle, +) -> None: + """Do setup of an optimistic switch.""" + if style == ConfigurationStyle.LEGACY: + await async_setup_legacy_format( + hass, + count, + { + TEST_OBJECT_ID: { + **SWITCH_ACTIONS, + } + }, + ) + elif style == ConfigurationStyle.MODERN: + await async_setup_modern_format( + hass, + count, + { + **NAMED_SWITCH_ACTIONS, + }, + ) + + +async def test_legacy_to_modern_config(hass: HomeAssistant) -> None: + """Test the conversion of legacy template to modern template.""" + config = { + "foo": { + "friendly_name": "foo bar", + "value_template": "{{ 1 == 1 }}", + "unique_id": "foo-bar-switch", + "icon_template": "{{ 'mdi.abc' }}", + "entity_picture_template": "{{ 'mypicture.jpg' }}", + "availability_template": "{{ 1 == 1 }}", + **SWITCH_ACTIONS, + } + } + altered_configs = rewrite_legacy_to_modern_conf(hass, config) + + assert len(altered_configs) == 1 + assert [ + { + "availability": Template("{{ 1 == 1 }}", hass), + "icon": Template("{{ 'mdi.abc' }}", hass), + "name": Template("foo bar", hass), + "object_id": "foo", + "picture": Template("{{ 'mypicture.jpg' }}", hass), + "turn_off": SWITCH_TURN_OFF, + "turn_on": SWITCH_TURN_ON, + "unique_id": "foo-bar-switch", + "state": Template("{{ 1 == 1 }}", hass), + } + ] == altered_configs + + +@pytest.mark.parametrize(("count", "state_template"), [(1, "{{ True }}")]) +@pytest.mark.parametrize( + "style", [ConfigurationStyle.LEGACY, ConfigurationStyle.MODERN] +) +async def test_setup(hass: HomeAssistant, setup_state_switch) -> None: + """Test template.""" + state = hass.states.get(TEST_ENTITY_ID) + assert state is not None + assert state.name == TEST_OBJECT_ID + assert state.state == STATE_ON + + +@pytest.mark.parametrize("state_key", ["value_template", "state"]) async def test_setup_config_entry( hass: HomeAssistant, + state_key: str, snapshot: SnapshotAssertion, ) -> None: """Test the config flow.""" @@ -60,7 +264,7 @@ async def test_setup_config_entry( domain=template.DOMAIN, options={ "name": "My template", - "value_template": "{{ states('switch.one') }}", + state_key: "{{ states('switch.one') }}", "template_type": SWITCH_DOMAIN, }, title="My template", @@ -75,200 +279,108 @@ async def test_setup_config_entry( assert state == snapshot -async def test_template_state_text(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("count", "state_template"), [(1, "{{ states.switch.test_state.state }}")] +) +@pytest.mark.parametrize( + "style", [ConfigurationStyle.LEGACY, ConfigurationStyle.MODERN] +) +async def test_template_state_text(hass: HomeAssistant, setup_state_switch) -> None: """Test the state text of a template.""" - with assert_setup_component(1, "switch"): - assert await async_setup_component( - hass, - "switch", - { - "switch": { - "platform": "template", - "switches": { - "test_template_switch": { - **OPTIMISTIC_SWITCH_CONFIG, - "value_template": "{{ 
states.switch.test_state.state }}", - } - }, - } - }, - ) - - await hass.async_block_till_done() - await hass.async_start() + hass.states.async_set(TEST_STATE_ENTITY_ID, STATE_ON) await hass.async_block_till_done() - hass.states.async_set("switch.test_state", STATE_ON) - await hass.async_block_till_done() - - state = hass.states.get("switch.test_template_switch") + state = hass.states.get(TEST_ENTITY_ID) assert state.state == STATE_ON - hass.states.async_set("switch.test_state", STATE_OFF) + hass.states.async_set(TEST_STATE_ENTITY_ID, STATE_OFF) await hass.async_block_till_done() - state = hass.states.get("switch.test_template_switch") + state = hass.states.get(TEST_ENTITY_ID) assert state.state == STATE_OFF -async def test_template_state_boolean_on(hass: HomeAssistant) -> None: - """Test the setting of the state with boolean on.""" - with assert_setup_component(1, "switch"): - assert await async_setup_component( - hass, - "switch", - { - "switch": { - "platform": "template", - "switches": { - "test_template_switch": { - **OPTIMISTIC_SWITCH_CONFIG, - "value_template": "{{ 1 == 1 }}", - } - }, - } - }, - ) - - await hass.async_block_till_done() - await hass.async_start() - await hass.async_block_till_done() - - state = hass.states.get("switch.test_template_switch") - assert state.state == STATE_ON +@pytest.mark.parametrize("count", [1]) +@pytest.mark.parametrize( + ("expected", "state_template"), + [ + (STATE_ON, "{{ 1 == 1 }}"), + (STATE_OFF, "{{ 1 == 2 }}"), + ], +) +@pytest.mark.parametrize( + "style", [ConfigurationStyle.LEGACY, ConfigurationStyle.MODERN] +) +async def test_template_state_boolean( + hass: HomeAssistant, expected: str, setup_state_switch +) -> None: + """Test the setting of the state with boolean template.""" + state = hass.states.get(TEST_ENTITY_ID) + assert state.state == expected -async def test_template_state_boolean_off(hass: HomeAssistant) -> None: - """Test the setting of the state with off.""" - with assert_setup_component(1, "switch"): - assert await async_setup_component( - hass, - "switch", - { - "switch": { - "platform": "template", - "switches": { - "test_template_switch": { - **OPTIMISTIC_SWITCH_CONFIG, - "value_template": "{{ 1 == 2 }}", - } - }, - } - }, - ) - - await hass.async_block_till_done() - await hass.async_start() - await hass.async_block_till_done() - - state = hass.states.get("switch.test_template_switch") - assert state.state == STATE_OFF - - -async def test_icon_template(hass: HomeAssistant) -> None: - """Test icon template.""" - with assert_setup_component(1, "switch"): - assert await async_setup_component( - hass, - "switch", - { - "switch": { - "platform": "template", - "switches": { - "test_template_switch": { - **OPTIMISTIC_SWITCH_CONFIG, - "value_template": "{{ states.switch.test_state.state }}", - "icon_template": ( - "{% if states.switch.test_state.state %}" - "mdi:check" - "{% endif %}" - ), - } - }, - } - }, - ) - - await hass.async_block_till_done() - await hass.async_start() - await hass.async_block_till_done() - - state = hass.states.get("switch.test_template_switch") +@pytest.mark.parametrize( + ("count", "attribute_template"), + [(1, "{% if states.switch.test_state.state %}mdi:check{% endif %}")], +) +@pytest.mark.parametrize( + ("style", "attribute"), + [ + (ConfigurationStyle.LEGACY, "icon_template"), + (ConfigurationStyle.MODERN, "icon"), + ], +) +async def test_icon_template( + hass: HomeAssistant, setup_single_attribute_switch +) -> None: + """Test the icon template.""" + state = hass.states.get(TEST_ENTITY_ID)
assert state.attributes.get("icon") == "" - hass.states.async_set("switch.test_state", STATE_ON) + hass.states.async_set(TEST_STATE_ENTITY_ID, STATE_ON) await hass.async_block_till_done() - state = hass.states.get("switch.test_template_switch") + state = hass.states.get(TEST_ENTITY_ID) assert state.attributes["icon"] == "mdi:check" -async def test_entity_picture_template(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("count", "attribute_template"), + [(1, "{% if states.switch.test_state.state %}/local/switch.png{% endif %}")], +) +@pytest.mark.parametrize( + ("style", "attribute"), + [ + (ConfigurationStyle.LEGACY, "entity_picture_template"), + (ConfigurationStyle.MODERN, "picture"), + ], +) +async def test_entity_picture_template( + hass: HomeAssistant, setup_single_attribute_switch +) -> None: """Test entity_picture template.""" - with assert_setup_component(1, "switch"): - assert await async_setup_component( - hass, - "switch", - { - "switch": { - "platform": "template", - "switches": { - "test_template_switch": { - **OPTIMISTIC_SWITCH_CONFIG, - "value_template": "{{ states.switch.test_state.state }}", - "entity_picture_template": ( - "{% if states.switch.test_state.state %}" - "/local/switch.png" - "{% endif %}" - ), - } - }, - } - }, - ) - - await hass.async_block_till_done() - await hass.async_start() - await hass.async_block_till_done() - - state = hass.states.get("switch.test_template_switch") + state = hass.states.get(TEST_ENTITY_ID) assert state.attributes.get("entity_picture") == "" - hass.states.async_set("switch.test_state", STATE_ON) + hass.states.async_set(TEST_STATE_ENTITY_ID, STATE_ON) await hass.async_block_till_done() - state = hass.states.get("switch.test_template_switch") + state = hass.states.get(TEST_ENTITY_ID) assert state.attributes["entity_picture"] == "/local/switch.png" -async def test_template_syntax_error(hass: HomeAssistant) -> None: +@pytest.mark.parametrize(("count", "state_template"), [(0, "{% if rubbish %}")]) +@pytest.mark.parametrize( + "style", + [ConfigurationStyle.LEGACY, ConfigurationStyle.MODERN], +) +async def test_template_syntax_error(hass: HomeAssistant, setup_state_switch) -> None: """Test templating syntax error.""" - with assert_setup_component(0, "switch"): - assert await async_setup_component( - hass, - "switch", - { - "switch": { - "platform": "template", - "switches": { - "test_template_switch": { - **OPTIMISTIC_SWITCH_CONFIG, - "value_template": "{% if rubbish %}", - } - }, - } - }, - ) - - await hass.async_block_till_done() - await hass.async_start() - await hass.async_block_till_done() - assert hass.states.async_all("switch") == [] -async def test_invalid_name_does_not_create(hass: HomeAssistant) -> None: - """Test invalid name.""" +async def test_invalid_legacy_slug_does_not_create(hass: HomeAssistant) -> None: + """Test invalid legacy slug.""" with assert_setup_component(0, "switch"): assert await async_setup_component( hass, @@ -278,7 +390,7 @@ async def test_invalid_name_does_not_create(hass: HomeAssistant) -> None: "platform": "template", "switches": { "test INVALID switch": { - **OPTIMISTIC_SWITCH_CONFIG, + **SWITCH_ACTIONS, "value_template": "{{ rubbish }", } }, @@ -293,19 +405,32 @@ async def test_invalid_name_does_not_create(hass: HomeAssistant) -> None: assert hass.states.async_all("switch") == [] -async def test_invalid_switch_does_not_create(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("config", "domain"), + [ + ( + { + "template": {"switch": "Invalid"}, + }, + template.DOMAIN, + ), + ( + { + 
"switch": { + "platform": "template", + "switches": {TEST_OBJECT_ID: "Invalid"}, + } + }, + switch.DOMAIN, + ), + ], +) +async def test_invalid_switch_does_not_create( + hass: HomeAssistant, config: dict, domain: str +) -> None: """Test invalid switch.""" - with assert_setup_component(0, "switch"): - assert await async_setup_component( - hass, - "switch", - { - "switch": { - "platform": "template", - "switches": {"test_template_switch": "Invalid"}, - } - }, - ) + with assert_setup_component(0, domain): + assert await async_setup_component(hass, domain, config) await hass.async_block_till_done() await hass.async_start() @@ -314,12 +439,33 @@ async def test_invalid_switch_does_not_create(hass: HomeAssistant) -> None: assert hass.states.async_all("switch") == [] -async def test_no_switches_does_not_create(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("config", "domain", "count"), + [ + ( + { + "template": {"switch": []}, + }, + template.DOMAIN, + 1, + ), + ( + { + "switch": { + "platform": "template", + } + }, + switch.DOMAIN, + 0, + ), + ], +) +async def test_no_switches_does_not_create( + hass: HomeAssistant, config: dict, domain: str, count: int +) -> None: """Test if there are no switches no creation.""" - with assert_setup_component(0, "switch"): - assert await async_setup_component( - hass, "switch", {"switch": {"platform": "template"}} - ) + with assert_setup_component(count, domain): + assert await async_setup_component(hass, domain, config) await hass.async_block_till_done() await hass.async_start() @@ -328,239 +474,254 @@ async def test_no_switches_does_not_create(hass: HomeAssistant) -> None: assert hass.states.async_all("switch") == [] -async def test_missing_on_does_not_create(hass: HomeAssistant) -> None: - """Test missing on.""" - with assert_setup_component(0, "switch"): - assert await async_setup_component( - hass, - "switch", +@pytest.mark.parametrize( + ("config", "domain"), + [ + ( { - "switch": { - "platform": "template", - "switches": { - "test_template_switch": { - "value_template": "{{ states.switch.test_state.state }}", - "not_on": { - "service": "switch.turn_on", - "entity_id": "switch.test_state", - }, - "turn_off": { - "service": "switch.turn_off", - "entity_id": "switch.test_state", - }, - } - }, - } - }, - ) - - await hass.async_block_till_done() - await hass.async_start() - await hass.async_block_till_done() - - assert hass.states.async_all("switch") == [] - - -async def test_missing_off_does_not_create(hass: HomeAssistant) -> None: - """Test missing off.""" - with assert_setup_component(0, "switch"): - assert await async_setup_component( - hass, - "switch", - { - "switch": { - "platform": "template", - "switches": { - "test_template_switch": { - "value_template": "{{ states.switch.test_state.state }}", - "turn_on": { - "service": "switch.turn_on", - "entity_id": "switch.test_state", - }, - "not_off": { - "service": "switch.turn_off", - "entity_id": "switch.test_state", - }, - } - }, - } - }, - ) - - await hass.async_block_till_done() - await hass.async_start() - await hass.async_block_till_done() - - assert hass.states.async_all("switch") == [] - - -async def test_on_action(hass: HomeAssistant, calls: list[ServiceCall]) -> None: - """Test on action.""" - assert await async_setup_component( - hass, - "switch", - { - "switch": { - "platform": "template", - "switches": { - "test_template_switch": { - **OPTIMISTIC_SWITCH_CONFIG, - "value_template": "{{ states.switch.test_state.state }}", + "template": { + "switch": { + "not_on": SWITCH_TURN_ON, + 
"turn_off": SWITCH_TURN_OFF, + "state": "{{ states.switch.test_state.state }}", } }, - } - }, - ) + }, + template.DOMAIN, + ), + ( + { + "switch": { + "platform": "template", + "switches": { + TEST_OBJECT_ID: { + "not_on": SWITCH_TURN_ON, + "turn_off": SWITCH_TURN_OFF, + "value_template": "{{ states.switch.test_state.state }}", + } + }, + } + }, + switch.DOMAIN, + ), + ], +) +async def test_missing_on_does_not_create( + hass: HomeAssistant, config: dict, domain: str +) -> None: + """Test missing on.""" + with assert_setup_component(0, domain): + assert await async_setup_component(hass, domain, config) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() - hass.states.async_set("switch.test_state", STATE_OFF) + assert hass.states.async_all("switch") == [] + + +@pytest.mark.parametrize( + ("config", "domain"), + [ + ( + { + "template": { + "switch": { + "turn_on": SWITCH_TURN_ON, + "not_off": SWITCH_TURN_OFF, + "state": "{{ states.switch.test_state.state }}", + } + }, + }, + template.DOMAIN, + ), + ( + { + "switch": { + "platform": "template", + "switches": { + TEST_OBJECT_ID: { + "turn_on": SWITCH_TURN_ON, + "not_off": SWITCH_TURN_OFF, + "value_template": "{{ states.switch.test_state.state }}", + } + }, + } + }, + switch.DOMAIN, + ), + ], +) +async def test_missing_off_does_not_create( + hass: HomeAssistant, config: dict, domain: str +) -> None: + """Test missing off.""" + with assert_setup_component(0, domain): + assert await async_setup_component(hass, domain, config) + + await hass.async_block_till_done() + await hass.async_start() await hass.async_block_till_done() - state = hass.states.get("switch.test_template_switch") + assert hass.states.async_all("switch") == [] + + +@pytest.mark.parametrize( + ("count", "state_template"), [(1, "{{ states('switch.test_state') }}")] +) +@pytest.mark.parametrize( + "style", [ConfigurationStyle.LEGACY, ConfigurationStyle.MODERN] +) +async def test_on_action( + hass: HomeAssistant, setup_state_switch, calls: list[ServiceCall] +) -> None: + """Test on action.""" + hass.states.async_set(TEST_STATE_ENTITY_ID, STATE_OFF) + await hass.async_block_till_done() + + state = hass.states.get(TEST_ENTITY_ID) assert state.state == STATE_OFF await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "switch.test_template_switch"}, + {ATTR_ENTITY_ID: TEST_ENTITY_ID}, blocking=True, ) assert len(calls) == 1 assert calls[-1].data["action"] == "turn_on" - assert calls[-1].data["caller"] == "switch.test_template_switch" + assert calls[-1].data["caller"] == TEST_ENTITY_ID +@pytest.mark.parametrize("count", [1]) +@pytest.mark.parametrize( + "style", [ConfigurationStyle.LEGACY, ConfigurationStyle.MODERN] +) async def test_on_action_optimistic( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, setup_optimistic_switch, calls: list[ServiceCall] ) -> None: """Test on action in optimistic mode.""" - assert await async_setup_component( - hass, - "switch", - { - "switch": { - "platform": "template", - "switches": { - "test_template_switch": { - **OPTIMISTIC_SWITCH_CONFIG, - } - }, - } - }, - ) - - await hass.async_start() + hass.states.async_set(TEST_ENTITY_ID, STATE_OFF) await hass.async_block_till_done() - hass.states.async_set("switch.test_template_switch", STATE_OFF) - await hass.async_block_till_done() - - state = hass.states.get("switch.test_template_switch") + state = hass.states.get(TEST_ENTITY_ID) assert state.state == STATE_OFF await hass.services.async_call( SWITCH_DOMAIN, 
SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "switch.test_template_switch"}, + {ATTR_ENTITY_ID: TEST_ENTITY_ID}, blocking=True, ) - state = hass.states.get("switch.test_template_switch") + state = hass.states.get(TEST_ENTITY_ID) assert state.state == STATE_ON assert len(calls) == 1 assert calls[-1].data["action"] == "turn_on" - assert calls[-1].data["caller"] == "switch.test_template_switch" + assert calls[-1].data["caller"] == TEST_ENTITY_ID -async def test_off_action(hass: HomeAssistant, calls: list[ServiceCall]) -> None: +@pytest.mark.parametrize( + ("count", "state_template"), [(1, "{{ states.switch.test_state.state }}")] +) +@pytest.mark.parametrize( + "style", [ConfigurationStyle.LEGACY, ConfigurationStyle.MODERN] +) +async def test_off_action( + hass: HomeAssistant, setup_state_switch, calls: list[ServiceCall] +) -> None: """Test off action.""" - assert await async_setup_component( - hass, - "switch", - { - "switch": { - "platform": "template", - "switches": { - "test_template_switch": { - **OPTIMISTIC_SWITCH_CONFIG, - "value_template": "{{ states.switch.test_state.state }}", - } - }, - } - }, - ) - - await hass.async_block_till_done() - await hass.async_start() + hass.states.async_set(TEST_STATE_ENTITY_ID, STATE_ON) await hass.async_block_till_done() - hass.states.async_set("switch.test_state", STATE_ON) - await hass.async_block_till_done() - - state = hass.states.get("switch.test_template_switch") + state = hass.states.get(TEST_ENTITY_ID) assert state.state == STATE_ON await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "switch.test_template_switch"}, + {ATTR_ENTITY_ID: TEST_ENTITY_ID}, blocking=True, ) assert len(calls) == 1 assert calls[-1].data["action"] == "turn_off" - assert calls[-1].data["caller"] == "switch.test_template_switch" + assert calls[-1].data["caller"] == TEST_ENTITY_ID +@pytest.mark.parametrize("count", [1]) +@pytest.mark.parametrize( + "style", [ConfigurationStyle.LEGACY, ConfigurationStyle.MODERN] +) async def test_off_action_optimistic( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, setup_optimistic_switch, calls: list[ServiceCall] ) -> None: """Test off action in optimistic mode.""" - assert await async_setup_component( - hass, - "switch", - { - "switch": { - "platform": "template", - "switches": { - "test_template_switch": { - **OPTIMISTIC_SWITCH_CONFIG, - } - }, - } - }, - ) - - await hass.async_start() + hass.states.async_set(TEST_ENTITY_ID, STATE_ON) await hass.async_block_till_done() - hass.states.async_set("switch.test_template_switch", STATE_ON) - await hass.async_block_till_done() - - state = hass.states.get("switch.test_template_switch") + state = hass.states.get(TEST_ENTITY_ID) assert state.state == STATE_ON await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "switch.test_template_switch"}, + {ATTR_ENTITY_ID: TEST_ENTITY_ID}, blocking=True, ) - state = hass.states.get("switch.test_template_switch") + state = hass.states.get(TEST_ENTITY_ID) assert state.state == STATE_OFF assert len(calls) == 1 assert calls[-1].data["action"] == "turn_off" - assert calls[-1].data["caller"] == "switch.test_template_switch" + assert calls[-1].data["caller"] == TEST_ENTITY_ID -async def test_restore_state(hass: HomeAssistant) -> None: +@pytest.mark.parametrize("count", [1]) +@pytest.mark.parametrize( + ("config", "domain"), + [ + ( + { + "switch": { + "platform": "template", + "switches": { + "s1": { + **SWITCH_ACTIONS, + }, + "s2": { + **SWITCH_ACTIONS, + }, + }, + } + }, + switch.DOMAIN, + 
), + ( + { + "template": { + "switch": [ + { + "name": "s1", + **SWITCH_ACTIONS, + }, + { + "name": "s2", + **SWITCH_ACTIONS, + }, + ], + } + }, + template.DOMAIN, + ), + ], +) +async def test_restore_state( + hass: HomeAssistant, count: int, domain: str, config: dict[str, Any] +) -> None: """Test state restoration.""" mock_restore_cache( hass, @@ -573,23 +734,9 @@ async def test_restore_state(hass: HomeAssistant) -> None: hass.set_state(CoreState.starting) mock_component(hass, "recorder") - await async_setup_component( - hass, - "switch", - { - "switch": { - "platform": "template", - "switches": { - "s1": { - **OPTIMISTIC_SWITCH_CONFIG, - }, - "s2": { - **OPTIMISTIC_SWITCH_CONFIG, - }, - }, - } - }, - ) + with assert_setup_component(count, domain): + await async_setup_component(hass, domain, config) + await hass.async_block_till_done() state = hass.states.get("switch.s1") @@ -601,100 +748,157 @@ async def test_restore_state(hass: HomeAssistant) -> None: assert state.state == STATE_OFF -async def test_available_template_with_entities(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("count", "attribute_template"), + [(1, "{{ is_state('switch.test_state', 'on') }}")], +) +@pytest.mark.parametrize( + ("style", "attribute"), + [ + (ConfigurationStyle.LEGACY, "availability_template"), + (ConfigurationStyle.MODERN, "availability"), + ], +) +async def test_available_template_with_entities( + hass: HomeAssistant, setup_single_attribute_switch +) -> None: """Test availability templates with values from other entities.""" - await setup.async_setup_component( - hass, - "switch", - { - "switch": { - "platform": "template", - "switches": { - "test_template_switch": { - **OPTIMISTIC_SWITCH_CONFIG, - "value_template": "{{ 1 == 1 }}", - "availability_template": ( - "{{ is_state('availability_state.state', 'on') }}" - ), - } - }, - } - }, - ) - - await hass.async_block_till_done() - await hass.async_start() + hass.states.async_set(TEST_STATE_ENTITY_ID, STATE_ON) await hass.async_block_till_done() - hass.states.async_set("availability_state.state", STATE_ON) + assert hass.states.get(TEST_ENTITY_ID).state != STATE_UNAVAILABLE + + hass.states.async_set(TEST_STATE_ENTITY_ID, STATE_OFF) await hass.async_block_till_done() - assert hass.states.get("switch.test_template_switch").state != STATE_UNAVAILABLE - - hass.states.async_set("availability_state.state", STATE_OFF) - await hass.async_block_till_done() - - assert hass.states.get("switch.test_template_switch").state == STATE_UNAVAILABLE + assert hass.states.get(TEST_ENTITY_ID).state == STATE_UNAVAILABLE +@pytest.mark.parametrize("count", [1]) +@pytest.mark.parametrize( + ("config", "domain"), + [ + ( + { + "switch": { + "platform": "template", + "switches": { + TEST_OBJECT_ID: { + **SWITCH_ACTIONS, + "value_template": "{{ true }}", + "availability_template": "{{ x - 12 }}", + } + }, + } + }, + switch.DOMAIN, + ), + ( + { + "template": { + "switch": { + **NAMED_SWITCH_ACTIONS, + "state": "{{ true }}", + "availability": "{{ x - 12 }}", + }, + } + }, + template.DOMAIN, + ), + ], +) async def test_invalid_availability_template_keeps_component_available( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + count: int, + config: dict[str, Any], + domain: str, + caplog: pytest.LogCaptureFixture, ) -> None: """Test that an invalid availability keeps the device available.""" - await setup.async_setup_component( - hass, - "switch", - { - "switch": { - "platform": "template", - "switches": { - "test_template_switch": { - 
**OPTIMISTIC_SWITCH_CONFIG, - "value_template": "{{ true }}", - "availability_template": "{{ x - 12 }}", - } - }, - } - }, - ) + with assert_setup_component(count, domain): + await async_setup_component(hass, domain, config) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() - assert hass.states.get("switch.test_template_switch").state != STATE_UNAVAILABLE + assert hass.states.get(TEST_ENTITY_ID).state != STATE_UNAVAILABLE assert "UndefinedError: 'x' is undefined" in caplog.text -async def test_unique_id(hass: HomeAssistant) -> None: - """Test unique_id option only creates one switch per id.""" - await setup.async_setup_component( - hass, - "switch", - { - "switch": { - "platform": "template", - "switches": { - "test_template_switch_01": { - **OPTIMISTIC_SWITCH_CONFIG, - "unique_id": "not-so-unique-anymore", - "value_template": "{{ true }}", - }, - "test_template_switch_02": { - **OPTIMISTIC_SWITCH_CONFIG, - "unique_id": "not-so-unique-anymore", - "value_template": "{{ false }}", - }, +@pytest.mark.parametrize("count", [1]) +@pytest.mark.parametrize( + ("switch_config", "style"), + [ + ( + { + "test_template_switch_01": UNIQUE_ID_CONFIG, + "test_template_switch_02": UNIQUE_ID_CONFIG, + }, + ConfigurationStyle.LEGACY, + ), + ( + [ + { + "name": "test_template_switch_01", + **UNIQUE_ID_CONFIG, }, - } - }, - ) + { + "name": "test_template_switch_02", + **UNIQUE_ID_CONFIG, + }, + ], + ConfigurationStyle.MODERN, + ), + ], +) +async def test_unique_id(hass: HomeAssistant, setup_switch) -> None: + """Test unique_id option only creates one switch per id.""" + assert len(hass.states.async_all("switch")) == 1 + + +async def test_nested_unique_id( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test a template unique_id propagates to switch unique_ids.""" + with assert_setup_component(1, template.DOMAIN): + assert await async_setup_component( + hass, + template.DOMAIN, + { + "template": { + "unique_id": "x", + "switch": [ + { + **SWITCH_ACTIONS, + "name": "test_a", + "unique_id": "a", + "state": "{{ true }}", + }, + { + **SWITCH_ACTIONS, + "name": "test_b", + "unique_id": "b", + "state": "{{ true }}", + }, + ], + }, + }, + ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() - assert len(hass.states.async_all("switch")) == 1 + assert len(hass.states.async_all("switch")) == 2 + + entry = entity_registry.async_get("switch.test_a") + assert entry + assert entry.unique_id == "x-a" + + entry = entity_registry.async_get("switch.test_b") + assert entry + assert entry.unique_id == "x-b" async def test_device_id( @@ -720,7 +924,7 @@ async def test_device_id( domain=template.DOMAIN, options={ "name": "My template", - "value_template": "{{ true }}", + "state": "{{ true }}", "template_type": "switch", "device_id": device_entry.id, }, diff --git a/tests/components/tesla_fleet/conftest.py b/tests/components/tesla_fleet/conftest.py index 06d2b54c936..10b01caca96 100644 --- a/tests/components/tesla_fleet/conftest.py +++ b/tests/components/tesla_fleet/conftest.py @@ -1,4 +1,4 @@ -"""Fixtures for Tessie.""" +"""Fixtures for Tesla Fleet.""" from __future__ import annotations @@ -113,7 +113,7 @@ def mock_products() -> Generator[AsyncMock]: def mock_vehicle_state() -> Generator[AsyncMock]: """Mock Tesla Fleet API Vehicle Specific vehicle method.""" with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.vehicle", + "tesla_fleet_api.tesla.VehicleFleet.vehicle", return_value=VEHICLE_ONLINE, ) as 
mock_vehicle: yield mock_vehicle @@ -123,7 +123,7 @@ def mock_vehicle_state() -> Generator[AsyncMock]: def mock_vehicle_data() -> Generator[AsyncMock]: """Mock Tesla Fleet API Vehicle Specific vehicle_data method.""" with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.vehicle_data", + "tesla_fleet_api.tesla.VehicleFleet.vehicle_data", return_value=VEHICLE_DATA, ) as mock_vehicle_data: yield mock_vehicle_data @@ -133,7 +133,7 @@ def mock_vehicle_data() -> Generator[AsyncMock]: def mock_wake_up() -> Generator[AsyncMock]: """Mock Tesla Fleet API Vehicle Specific wake_up method.""" with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.wake_up", + "tesla_fleet_api.tesla.VehicleFleet.wake_up", return_value=VEHICLE_ONLINE, ) as mock_wake_up: yield mock_wake_up @@ -143,7 +143,7 @@ def mock_wake_up() -> Generator[AsyncMock]: def mock_live_status() -> Generator[AsyncMock]: """Mock Tesla Fleet API Energy Specific live_status method.""" with patch( - "homeassistant.components.tesla_fleet.EnergySpecific.live_status", + "tesla_fleet_api.tesla.EnergySite.live_status", side_effect=lambda: deepcopy(LIVE_STATUS), ) as mock_live_status: yield mock_live_status @@ -153,7 +153,7 @@ def mock_live_status() -> Generator[AsyncMock]: def mock_site_info() -> Generator[AsyncMock]: """Mock Tesla Fleet API Energy Specific site_info method.""" with patch( - "homeassistant.components.tesla_fleet.EnergySpecific.site_info", + "tesla_fleet_api.tesla.EnergySite.site_info", side_effect=lambda: deepcopy(SITE_INFO), ) as mock_live_status: yield mock_live_status @@ -182,7 +182,7 @@ def mock_request(): def mock_energy_history(): """Mock Teslemetry Energy Specific site_info method.""" with patch( - "homeassistant.components.teslemetry.EnergySpecific.energy_history", + "tesla_fleet_api.tesla.EnergySite.energy_history", return_value=ENERGY_HISTORY, ) as mock_live_status: yield mock_live_status @@ -192,7 +192,7 @@ def mock_energy_history(): def mock_signed_command() -> Generator[AsyncMock]: """Mock Tesla Fleet Api signed_command method.""" with patch( - "homeassistant.components.tesla_fleet.VehicleSigned.signed_command", + "tesla_fleet_api.tesla.VehicleSigned.signed_command", return_value=COMMAND_OK, ) as mock_signed_command: yield mock_signed_command diff --git a/tests/components/tesla_fleet/test_button.py b/tests/components/tesla_fleet/test_button.py index ef1cfd90357..d43f7448379 100644 --- a/tests/components/tesla_fleet/test_button.py +++ b/tests/components/tesla_fleet/test_button.py @@ -56,7 +56,7 @@ async def test_press( await setup_platform(hass, normal_config_entry, [Platform.BUTTON]) with patch( - f"homeassistant.components.tesla_fleet.VehicleSpecific.{func}", + f"tesla_fleet_api.tesla.VehicleFleet.{func}", return_value=COMMAND_OK, ) as command: await hass.services.async_call( @@ -85,7 +85,7 @@ async def test_press_signing_error( with ( patch("homeassistant.components.tesla_fleet.TeslaFleetApi.get_private_key"), patch( - "homeassistant.components.tesla_fleet.VehicleSigned.flash_lights", + "tesla_fleet_api.tesla.VehicleSigned.flash_lights", side_effect=NotOnWhitelistFault, ), pytest.raises(HomeAssistantError) as error, diff --git a/tests/components/tesla_fleet/test_climate.py b/tests/components/tesla_fleet/test_climate.py index b45e5259a5c..fae79c795c2 100644 --- a/tests/components/tesla_fleet/test_climate.py +++ b/tests/components/tesla_fleet/test_climate.py @@ -257,7 +257,7 @@ async def test_invalid_error( with ( patch( - 
"homeassistant.components.tesla_fleet.VehicleSpecific.auto_conditioning_start", + "tesla_fleet_api.tesla.VehicleFleet.auto_conditioning_start", side_effect=InvalidCommand, ) as mock_on, pytest.raises( @@ -285,7 +285,7 @@ async def test_errors( with ( patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.auto_conditioning_start", + "tesla_fleet_api.tesla.VehicleFleet.auto_conditioning_start", return_value=response, ) as mock_on, pytest.raises(HomeAssistantError), @@ -308,7 +308,7 @@ async def test_ignored_error( await setup_platform(hass, normal_config_entry, [Platform.CLIMATE]) entity_id = "climate.test_climate" with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.auto_conditioning_start", + "tesla_fleet_api.tesla.VehicleFleet.auto_conditioning_start", return_value=COMMAND_IGNORED_REASON, ) as mock_on: await hass.services.async_call( diff --git a/tests/components/tesla_fleet/test_cover.py b/tests/components/tesla_fleet/test_cover.py index ac5307b2fdd..15d14f34a87 100644 --- a/tests/components/tesla_fleet/test_cover.py +++ b/tests/components/tesla_fleet/test_cover.py @@ -89,7 +89,7 @@ async def test_cover_services( # Vent Windows entity_id = "cover.test_windows" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.window_control", + "tesla_fleet_api.tesla.VehicleFleet.window_control", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -118,7 +118,7 @@ async def test_cover_services( # Charge Port Door entity_id = "cover.test_charge_port_door" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.charge_port_door_open", + "tesla_fleet_api.tesla.VehicleFleet.charge_port_door_open", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -133,7 +133,7 @@ async def test_cover_services( assert state.state == CoverState.OPEN with patch( - "homeassistant.components.teslemetry.VehicleSpecific.charge_port_door_close", + "tesla_fleet_api.tesla.VehicleFleet.charge_port_door_close", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -150,7 +150,7 @@ async def test_cover_services( # Frunk entity_id = "cover.test_frunk" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.actuate_trunk", + "tesla_fleet_api.tesla.VehicleFleet.actuate_trunk", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -167,7 +167,7 @@ async def test_cover_services( # Trunk entity_id = "cover.test_trunk" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.actuate_trunk", + "tesla_fleet_api.tesla.VehicleFleet.actuate_trunk", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -196,7 +196,7 @@ async def test_cover_services( # Sunroof entity_id = "cover.test_sunroof" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.sun_roof_control", + "tesla_fleet_api.tesla.VehicleFleet.sun_roof_control", return_value=COMMAND_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/tesla_fleet/test_lock.py b/tests/components/tesla_fleet/test_lock.py index 00b77aefcaf..ac9a7b49b55 100644 --- a/tests/components/tesla_fleet/test_lock.py +++ b/tests/components/tesla_fleet/test_lock.py @@ -59,7 +59,7 @@ async def test_lock_services( entity_id = "lock.test_lock" with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.door_lock", + "tesla_fleet_api.tesla.VehicleFleet.door_lock", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -73,7 +73,7 @@ async def test_lock_services( call.assert_called_once() with patch( - 
"homeassistant.components.tesla_fleet.VehicleSpecific.door_unlock", + "tesla_fleet_api.tesla.VehicleFleet.door_unlock", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -97,7 +97,7 @@ async def test_lock_services( ) with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.charge_port_door_open", + "tesla_fleet_api.tesla.VehicleFleet.charge_port_door_open", return_value=COMMAND_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/tesla_fleet/test_media_player.py b/tests/components/tesla_fleet/test_media_player.py index 4c833e7499f..b2900d96c80 100644 --- a/tests/components/tesla_fleet/test_media_player.py +++ b/tests/components/tesla_fleet/test_media_player.py @@ -88,7 +88,7 @@ async def test_media_player_services( entity_id = "media_player.test_media_player" with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.adjust_volume", + "tesla_fleet_api.tesla.VehicleFleet.adjust_volume", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -102,7 +102,7 @@ async def test_media_player_services( call.assert_called_once() with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.media_toggle_playback", + "tesla_fleet_api.tesla.VehicleFleet.media_toggle_playback", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -117,7 +117,7 @@ async def test_media_player_services( # This test will fail without the previous call to pause playback with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.media_toggle_playback", + "tesla_fleet_api.tesla.VehicleFleet.media_toggle_playback", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -131,7 +131,7 @@ async def test_media_player_services( call.assert_called_once() with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.media_next_track", + "tesla_fleet_api.tesla.VehicleFleet.media_next_track", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -144,7 +144,7 @@ async def test_media_player_services( call.assert_called_once() with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.media_prev_track", + "tesla_fleet_api.tesla.VehicleFleet.media_prev_track", return_value=COMMAND_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/tesla_fleet/test_number.py b/tests/components/tesla_fleet/test_number.py index 8551a99ee29..4ade98852c8 100644 --- a/tests/components/tesla_fleet/test_number.py +++ b/tests/components/tesla_fleet/test_number.py @@ -57,7 +57,7 @@ async def test_number_services( entity_id = "number.test_charge_current" with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.set_charging_amps", + "tesla_fleet_api.tesla.VehicleFleet.set_charging_amps", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -72,7 +72,7 @@ async def test_number_services( entity_id = "number.test_charge_limit" with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.set_charge_limit", + "tesla_fleet_api.tesla.VehicleFleet.set_charge_limit", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -87,7 +87,7 @@ async def test_number_services( entity_id = "number.energy_site_backup_reserve" with patch( - "homeassistant.components.tesla_fleet.EnergySpecific.backup", + "tesla_fleet_api.tesla.EnergySite.backup", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -105,7 +105,7 @@ async def test_number_services( entity_id = "number.energy_site_off_grid_reserve" with patch( - 
"homeassistant.components.tesla_fleet.EnergySpecific.off_grid_vehicle_charging_reserve", + "tesla_fleet_api.tesla.EnergySite.off_grid_vehicle_charging_reserve", return_value=COMMAND_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/tesla_fleet/test_select.py b/tests/components/tesla_fleet/test_select.py index 902b28ddb7a..f06d67041c9 100644 --- a/tests/components/tesla_fleet/test_select.py +++ b/tests/components/tesla_fleet/test_select.py @@ -61,11 +61,11 @@ async def test_select_services( entity_id = "select.test_seat_heater_front_left" with ( patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.remote_seat_heater_request", + "tesla_fleet_api.tesla.VehicleFleet.remote_seat_heater_request", return_value=COMMAND_OK, ) as remote_seat_heater_request, patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.auto_conditioning_start", + "tesla_fleet_api.tesla.VehicleFleet.auto_conditioning_start", return_value=COMMAND_OK, ) as auto_conditioning_start, ): @@ -83,11 +83,11 @@ async def test_select_services( entity_id = "select.test_steering_wheel_heater" with ( patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.remote_steering_wheel_heat_level_request", + "tesla_fleet_api.tesla.VehicleFleet.remote_steering_wheel_heat_level_request", return_value=COMMAND_OK, ) as remote_steering_wheel_heat_level_request, patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.auto_conditioning_start", + "tesla_fleet_api.tesla.VehicleFleet.auto_conditioning_start", return_value=COMMAND_OK, ) as auto_conditioning_start, ): @@ -104,7 +104,7 @@ async def test_select_services( entity_id = "select.energy_site_operation_mode" with patch( - "homeassistant.components.tesla_fleet.EnergySpecific.operation", + "tesla_fleet_api.tesla.EnergySite.operation", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -122,7 +122,7 @@ async def test_select_services( entity_id = "select.energy_site_allow_export" with patch( - "homeassistant.components.tesla_fleet.EnergySpecific.grid_import_export", + "tesla_fleet_api.tesla.EnergySite.grid_import_export", return_value=COMMAND_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/tesla_fleet/test_switch.py b/tests/components/tesla_fleet/test_switch.py index fba4fc05cc4..022c3a0ab18 100644 --- a/tests/components/tesla_fleet/test_switch.py +++ b/tests/components/tesla_fleet/test_switch.py @@ -71,41 +71,41 @@ async def test_switch_offline( @pytest.mark.parametrize( ("name", "on", "off"), [ - ("test_charge", "VehicleSpecific.charge_start", "VehicleSpecific.charge_stop"), + ("test_charge", "VehicleFleet.charge_start", "VehicleFleet.charge_stop"), ( "test_auto_seat_climate_left", - "VehicleSpecific.remote_auto_seat_climate_request", - "VehicleSpecific.remote_auto_seat_climate_request", + "VehicleFleet.remote_auto_seat_climate_request", + "VehicleFleet.remote_auto_seat_climate_request", ), ( "test_auto_seat_climate_right", - "VehicleSpecific.remote_auto_seat_climate_request", - "VehicleSpecific.remote_auto_seat_climate_request", + "VehicleFleet.remote_auto_seat_climate_request", + "VehicleFleet.remote_auto_seat_climate_request", ), ( "test_auto_steering_wheel_heater", - "VehicleSpecific.remote_auto_steering_wheel_heat_climate_request", - "VehicleSpecific.remote_auto_steering_wheel_heat_climate_request", + "VehicleFleet.remote_auto_steering_wheel_heat_climate_request", + "VehicleFleet.remote_auto_steering_wheel_heat_climate_request", ), ( "test_defrost", - 
"VehicleSpecific.set_preconditioning_max", - "VehicleSpecific.set_preconditioning_max", + "VehicleFleet.set_preconditioning_max", + "VehicleFleet.set_preconditioning_max", ), ( "energy_site_storm_watch", - "EnergySpecific.storm_mode", - "EnergySpecific.storm_mode", + "EnergySite.storm_mode", + "EnergySite.storm_mode", ), ( "energy_site_allow_charging_from_grid", - "EnergySpecific.grid_import_export", - "EnergySpecific.grid_import_export", + "EnergySite.grid_import_export", + "EnergySite.grid_import_export", ), ( "test_sentry_mode", - "VehicleSpecific.set_sentry_mode", - "VehicleSpecific.set_sentry_mode", + "VehicleFleet.set_sentry_mode", + "VehicleFleet.set_sentry_mode", ), ], ) @@ -122,7 +122,7 @@ async def test_switch_services( entity_id = f"switch.{name}" with patch( - f"homeassistant.components.tesla_fleet.{on}", + f"tesla_fleet_api.tesla.{on}", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -136,7 +136,7 @@ async def test_switch_services( call.assert_called_once() with patch( - f"homeassistant.components.tesla_fleet.{off}", + f"tesla_fleet_api.tesla.{off}", return_value=COMMAND_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/teslemetry/conftest.py b/tests/components/teslemetry/conftest.py index e89bab9eff1..0152543e512 100644 --- a/tests/components/teslemetry/conftest.py +++ b/tests/components/teslemetry/conftest.py @@ -25,7 +25,7 @@ from .const import ( def mock_metadata(): """Mock Tesla Fleet Api metadata method.""" with patch( - "homeassistant.components.teslemetry.Teslemetry.metadata", return_value=METADATA + "tesla_fleet_api.teslemetry.Teslemetry.metadata", return_value=METADATA ) as mock_products: yield mock_products @@ -34,7 +34,7 @@ def mock_metadata(): def mock_products(): """Mock Tesla Fleet Api products method.""" with patch( - "homeassistant.components.teslemetry.Teslemetry.products", return_value=PRODUCTS + "tesla_fleet_api.teslemetry.Teslemetry.products", return_value=PRODUCTS ) as mock_products: yield mock_products @@ -43,7 +43,7 @@ def mock_products(): def mock_vehicle_data() -> Generator[AsyncMock]: """Mock Tesla Fleet API Vehicle Specific vehicle_data method.""" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.vehicle_data", + "tesla_fleet_api.teslemetry.Vehicle.vehicle_data", return_value=VEHICLE_DATA, ) as mock_vehicle_data: yield mock_vehicle_data @@ -53,7 +53,7 @@ def mock_vehicle_data() -> Generator[AsyncMock]: def mock_legacy(): """Mock Tesla Fleet Api products method.""" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.pre2021", return_value=True + "tesla_fleet_api.teslemetry.Vehicle.pre2021", return_value=True ) as mock_pre2021: yield mock_pre2021 @@ -62,7 +62,7 @@ def mock_legacy(): def mock_wake_up(): """Mock Tesla Fleet API Vehicle Specific wake_up method.""" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.wake_up", + "tesla_fleet_api.teslemetry.Vehicle.wake_up", return_value=WAKE_UP_ONLINE, ) as mock_wake_up: yield mock_wake_up @@ -72,7 +72,7 @@ def mock_wake_up(): def mock_vehicle() -> Generator[AsyncMock]: """Mock Tesla Fleet API Vehicle Specific vehicle method.""" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.vehicle", + "tesla_fleet_api.teslemetry.Vehicle.vehicle", return_value=WAKE_UP_ONLINE, ) as mock_vehicle: yield mock_vehicle @@ -82,7 +82,7 @@ def mock_vehicle() -> Generator[AsyncMock]: def mock_request(): """Mock Tesla Fleet API Vehicle Specific class.""" with patch( - 
"homeassistant.components.teslemetry.Teslemetry._request", + "tesla_fleet_api.teslemetry.Teslemetry._request", return_value=COMMAND_OK, ) as mock_request: yield mock_request @@ -92,7 +92,7 @@ def mock_request(): def mock_live_status(): """Mock Teslemetry Energy Specific live_status method.""" with patch( - "homeassistant.components.teslemetry.EnergySpecific.live_status", + "tesla_fleet_api.tesla.energysite.EnergySite.live_status", side_effect=lambda: deepcopy(LIVE_STATUS), ) as mock_live_status: yield mock_live_status @@ -102,7 +102,7 @@ def mock_live_status(): def mock_site_info(): """Mock Teslemetry Energy Specific site_info method.""" with patch( - "homeassistant.components.teslemetry.EnergySpecific.site_info", + "tesla_fleet_api.tesla.energysite.EnergySite.site_info", side_effect=lambda: deepcopy(SITE_INFO), ) as mock_live_status: yield mock_live_status @@ -112,7 +112,7 @@ def mock_site_info(): def mock_energy_history(): """Mock Teslemetry Energy Specific site_info method.""" with patch( - "homeassistant.components.teslemetry.EnergySpecific.energy_history", + "tesla_fleet_api.tesla.energysite.EnergySite.energy_history", return_value=ENERGY_HISTORY, ) as mock_live_status: yield mock_live_status @@ -122,7 +122,7 @@ def mock_energy_history(): def mock_add_listener(): """Mock Teslemetry Stream listen method.""" with patch( - "homeassistant.components.teslemetry.TeslemetryStream.async_add_listener", + "teslemetry_stream.TeslemetryStream.async_add_listener", ) as mock_add_listener: mock_add_listener.listeners = [] @@ -165,7 +165,7 @@ def mock_stream_update_config(): def mock_stream_connected(): """Mock Teslemetry Stream listen method.""" with patch( - "homeassistant.components.teslemetry.TeslemetryStream.connected", + "teslemetry_stream.TeslemetryStream.connected", return_value=True, ) as mock_stream_connected: yield mock_stream_connected diff --git a/tests/components/teslemetry/const.py b/tests/components/teslemetry/const.py index 40d55dab71f..31915630951 100644 --- a/tests/components/teslemetry/const.py +++ b/tests/components/teslemetry/const.py @@ -18,7 +18,6 @@ VEHICLE_DATA_ALT = load_json_object_fixture("vehicle_data_alt.json", DOMAIN) LIVE_STATUS = load_json_object_fixture("live_status.json", DOMAIN) SITE_INFO = load_json_object_fixture("site_info.json", DOMAIN) ENERGY_HISTORY = load_json_object_fixture("energy_history.json", DOMAIN) -METADATA = load_json_object_fixture("metadata.json", DOMAIN) COMMAND_OK = {"response": {"result": True, "reason": ""}} COMMAND_REASON = {"response": {"result": False, "reason": "already closed"}} @@ -52,7 +51,7 @@ METADATA = { "proxy": False, "access": True, "polling": True, - "firmware": "2024.44.25", + "firmware": "2026.0.0", } }, } diff --git a/tests/components/teslemetry/fixtures/metadata.json b/tests/components/teslemetry/fixtures/metadata.json deleted file mode 100644 index 60282afc934..00000000000 --- a/tests/components/teslemetry/fixtures/metadata.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "uid": "abc-123", - "region": "NA", - "scopes": [ - "openid", - "offline_access", - "user_data", - "vehicle_device_data", - "vehicle_cmds", - "vehicle_charging_cmds", - "energy_device_data", - "energy_cmds" - ], - "vehicles": { - "LRW3F7EK4NC700000": { - "access": true, - "polling": true, - "proxy": true, - "firmware": "2024.44.25" - } - } -} diff --git a/tests/components/teslemetry/fixtures/vehicle_data.json b/tests/components/teslemetry/fixtures/vehicle_data.json index 0cd238c4e52..051c7199d00 100644 --- a/tests/components/teslemetry/fixtures/vehicle_data.json 
+++ b/tests/components/teslemetry/fixtures/vehicle_data.json @@ -192,7 +192,7 @@ "api_version": 71, "autopark_state_v2": "unavailable", "calendar_supported": true, - "car_version": "2024.44.25 06f534d46010", + "car_version": "2026.0.0 06f534d46010", "center_display_state": 0, "dashcam_clip_save_available": true, "dashcam_state": "Recording", diff --git a/tests/components/teslemetry/snapshots/test_binary_sensor.ambr b/tests/components/teslemetry/snapshots/test_binary_sensor.ambr index 6a6e9826dc2..a295dc16344 100644 --- a/tests/components/teslemetry/snapshots/test_binary_sensor.ambr +++ b/tests/components/teslemetry/snapshots/test_binary_sensor.ambr @@ -1371,6 +1371,147 @@ 'state': 'unknown', }) # --- +# name: test_binary_sensor[binary_sensor.test_located_at_favorite-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_located_at_favorite', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Located at favorite', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'located_at_favorite', + 'unique_id': 'LRW3F7EK4NC700000-located_at_favorite', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_located_at_favorite-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Located at favorite', + }), + 'context': , + 'entity_id': 'binary_sensor.test_located_at_favorite', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_located_at_home-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_located_at_home', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Located at home', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'located_at_home', + 'unique_id': 'LRW3F7EK4NC700000-located_at_home', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_located_at_home-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Located at home', + }), + 'context': , + 'entity_id': 'binary_sensor.test_located_at_home', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_located_at_work-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_located_at_work', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': 
None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Located at work', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'located_at_work', + 'unique_id': 'LRW3F7EK4NC700000-located_at_work', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_located_at_work-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Located at work', + }), + 'context': , + 'entity_id': 'binary_sensor.test_located_at_work', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- # name: test_binary_sensor[binary_sensor.test_offroad_lightbar-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1490,7 +1631,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'Pin to drive enabled', + 'original_name': 'PIN to Drive enabled', 'platform': 'teslemetry', 'previous_unique_id': None, 'supported_features': 0, @@ -1502,7 +1643,7 @@ # name: test_binary_sensor[binary_sensor.test_pin_to_drive_enabled-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Pin to drive enabled', + 'friendly_name': 'Test PIN to Drive enabled', }), 'context': , 'entity_id': 'binary_sensor.test_pin_to_drive_enabled', @@ -2801,6 +2942,45 @@ 'state': 'unknown', }) # --- +# name: test_binary_sensor_refresh[binary_sensor.test_located_at_favorite-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Located at favorite', + }), + 'context': , + 'entity_id': 'binary_sensor.test_located_at_favorite', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_located_at_home-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Located at home', + }), + 'context': , + 'entity_id': 'binary_sensor.test_located_at_home', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_located_at_work-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Located at work', + }), + 'context': , + 'entity_id': 'binary_sensor.test_located_at_work', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- # name: test_binary_sensor_refresh[binary_sensor.test_offroad_lightbar-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ @@ -2830,7 +3010,7 @@ # name: test_binary_sensor_refresh[binary_sensor.test_pin_to_drive_enabled-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Pin to drive enabled', + 'friendly_name': 'Test PIN to Drive enabled', }), 'context': , 'entity_id': 'binary_sensor.test_pin_to_drive_enabled', diff --git a/tests/components/teslemetry/snapshots/test_climate.ambr b/tests/components/teslemetry/snapshots/test_climate.ambr index 4c265c00cb8..e0e68f23c79 100644 --- a/tests/components/teslemetry/snapshots/test_climate.ambr +++ b/tests/components/teslemetry/snapshots/test_climate.ambr @@ -1,10 +1,4 @@ # serializer version: 1 -# name: test_asleep_or_offline[HomeAssistantError] - 'Timed out trying to wake up vehicle' -# --- -# name: test_asleep_or_offline[InvalidCommand] - 'Failed to wake up vehicle: The data request or command is unknown.' 
-# --- # name: test_climate[climate.test_cabin_overheat_protection-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -78,6 +72,10 @@ }), 'area_id': None, 'capabilities': dict({ + 'fan_modes': list([ + 'off', + 'bioweapon', + ]), 'hvac_modes': list([ , , @@ -113,7 +111,7 @@ 'original_name': 'Climate', 'platform': 'teslemetry', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': , 'unique_id': 'LRW3F7EK4NC700000-driver_temp', 'unit_of_measurement': None, @@ -123,6 +121,11 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'current_temperature': 30.0, + 'fan_mode': 'off', + 'fan_modes': list([ + 'off', + 'bioweapon', + ]), 'friendly_name': 'Test Climate', 'hvac_modes': list([ , @@ -137,7 +140,7 @@ 'dog', 'camp', ]), - 'supported_features': , + 'supported_features': , 'temperature': 22.0, }), 'context': , @@ -220,6 +223,10 @@ }), 'area_id': None, 'capabilities': dict({ + 'fan_modes': list([ + 'off', + 'bioweapon', + ]), 'hvac_modes': list([ , , @@ -255,7 +262,7 @@ 'original_name': 'Climate', 'platform': 'teslemetry', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': , 'unique_id': 'LRW3F7EK4NC700000-driver_temp', 'unit_of_measurement': None, @@ -265,6 +272,11 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'current_temperature': 30.0, + 'fan_mode': 'off', + 'fan_modes': list([ + 'off', + 'bioweapon', + ]), 'friendly_name': 'Test Climate', 'hvac_modes': list([ , @@ -279,7 +291,7 @@ 'dog', 'camp', ]), - 'supported_features': , + 'supported_features': , 'temperature': 22.0, }), 'context': , @@ -297,7 +309,9 @@ 'area_id': None, 'capabilities': dict({ 'hvac_modes': list([ + , , + , ]), 'max_temp': 40, 'min_temp': 30, @@ -339,6 +353,7 @@ 'capabilities': dict({ 'hvac_modes': list([ , + , ]), 'max_temp': 28.0, 'min_temp': 15.0, @@ -374,3 +389,85 @@ # name: test_invalid_error[error] 'Command returned exception: The data request or command is unknown.' 
# --- +# name: test_select_streaming[climate.test_cabin_overheat_protection] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': None, + 'friendly_name': 'Test Cabin overheat protection', + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 40, + 'min_temp': 30, + 'supported_features': , + 'target_temp_step': 5, + 'temperature': None, + }), + 'context': , + 'entity_id': 'climate.test_cabin_overheat_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'cool', + }) +# --- +# name: test_select_streaming[climate.test_climate LHD] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 26.0, + 'friendly_name': 'Test Climate', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 28.0, + 'min_temp': 15.0, + 'preset_mode': None, + 'preset_modes': list([ + 'off', + 'keep', + 'dog', + 'camp', + ]), + 'supported_features': , + 'temperature': 21.0, + }), + 'context': , + 'entity_id': 'climate.test_climate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat_cool', + }) +# --- +# name: test_select_streaming[climate.test_climate] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 26.0, + 'friendly_name': 'Test Climate', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 28.0, + 'min_temp': 15.0, + 'preset_mode': None, + 'preset_modes': list([ + 'off', + 'keep', + 'dog', + 'camp', + ]), + 'supported_features': , + 'temperature': 21.0, + }), + 'context': , + 'entity_id': 'climate.test_climate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat_cool', + }) +# --- diff --git a/tests/components/teslemetry/snapshots/test_diagnostics.ambr b/tests/components/teslemetry/snapshots/test_diagnostics.ambr index 56a8f759a21..a39e8a0ff74 100644 --- a/tests/components/teslemetry/snapshots/test_diagnostics.ambr +++ b/tests/components/teslemetry/snapshots/test_diagnostics.ambr @@ -375,7 +375,7 @@ 'vehicle_state_api_version': 71, 'vehicle_state_autopark_state_v2': 'unavailable', 'vehicle_state_calendar_supported': True, - 'vehicle_state_car_version': '2024.44.25 06f534d46010', + 'vehicle_state_car_version': '2026.0.0 06f534d46010', 'vehicle_state_center_display_state': 0, 'vehicle_state_dashcam_clip_save_available': True, 'vehicle_state_dashcam_state': 'Recording', diff --git a/tests/components/teslemetry/snapshots/test_media_player.ambr b/tests/components/teslemetry/snapshots/test_media_player.ambr index 663e91a502c..7f721b95289 100644 --- a/tests/components/teslemetry/snapshots/test_media_player.ambr +++ b/tests/components/teslemetry/snapshots/test_media_player.ambr @@ -47,7 +47,7 @@ 'media_title': 'Chapter 51: Cybertruck: Tesla, 2018–2019', 'source': 'Audible', 'supported_features': , - 'volume_level': 0.16129355359011466, + 'volume_level': 0.16129354838709678, }), 'context': , 'entity_id': 'media_player.test_media_player', @@ -64,10 +64,12 @@ 'friendly_name': 'Test Media player', 'media_album_name': '', 'media_artist': '', + 'media_duration': 0.0, 'media_playlist': '', 'media_title': '', 'source': 'Spotify', 'supported_features': , + 'volume_level': 0.0, }), 'context': , 'entity_id': 'media_player.test_media_player', @@ -125,7 +127,43 @@ 'media_title': 'Chapter 51: Cybertruck: Tesla, 2018–2019', 'source': 'Audible', 'supported_features': , - 'volume_level': 0.16129355359011466, + 'volume_level': 0.16129354838709678, + }), + 'context': , + 'entity_id': 'media_player.test_media_player', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) 
+# --- +# name: test_update_streaming[off] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'speaker', + 'friendly_name': 'Test Media player', + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.test_media_player', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_update_streaming[on] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'speaker', + 'friendly_name': 'Test Media player', + 'media_album_name': 'Test Album', + 'media_artist': 'Test Artist', + 'media_duration': 60, + 'media_position': 5, + 'source': 'Spotify', + 'supported_features': , + 'volume_level': 0.1935483870967742, }), 'context': , 'entity_id': 'media_player.test_media_player', diff --git a/tests/components/teslemetry/snapshots/test_switch.ambr b/tests/components/teslemetry/snapshots/test_switch.ambr index f9997133044..0586b454a91 100644 --- a/tests/components/teslemetry/snapshots/test_switch.ambr +++ b/tests/components/teslemetry/snapshots/test_switch.ambr @@ -495,3 +495,21 @@ 'state': 'off', }) # --- +# name: test_switch_streaming[switch.test_auto_seat_climate_left] + 'on' +# --- +# name: test_switch_streaming[switch.test_auto_seat_climate_right] + 'off' +# --- +# name: test_switch_streaming[switch.test_auto_steering_wheel_heater] + 'on' +# --- +# name: test_switch_streaming[switch.test_charge] + 'on' +# --- +# name: test_switch_streaming[switch.test_defrost] + 'off' +# --- +# name: test_switch_streaming[switch.test_sentry_mode] + 'on' +# --- diff --git a/tests/components/teslemetry/snapshots/test_update.ambr b/tests/components/teslemetry/snapshots/test_update.ambr index 1c7d525af86..391d81c086e 100644 --- a/tests/components/teslemetry/snapshots/test_update.ambr +++ b/tests/components/teslemetry/snapshots/test_update.ambr @@ -41,7 +41,7 @@ 'entity_picture': 'https://brands.home-assistant.io/_/teslemetry/icon.png', 'friendly_name': 'Test Update', 'in_progress': False, - 'installed_version': '2024.44.25', + 'installed_version': '2026.0.0', 'latest_version': '2024.12.0.0', 'release_summary': None, 'release_url': None, @@ -117,3 +117,128 @@ 'state': 'off', }) # --- +# name: test_update_streaming[downloading] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/teslemetry/icon.png', + 'friendly_name': 'Test Update', + 'in_progress': False, + 'installed_version': '2025.1.1', + 'latest_version': '2025.2.1', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.test_update', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_update_streaming[installing] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/teslemetry/icon.png', + 'friendly_name': 'Test Update', + 'in_progress': False, + 'installed_version': '2025.1.1', + 'latest_version': '2025.2.1', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.test_update', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_update_streaming[ready] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'auto_update': False, + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/teslemetry/icon.png', + 'friendly_name': 'Test Update', + 'in_progress': False, + 'installed_version': '2025.1.1', + 'latest_version': '2025.2.1', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.test_update', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_update_streaming[restored] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/teslemetry/icon.png', + 'friendly_name': 'Test Update', + 'in_progress': False, + 'installed_version': '2025.2.1', + 'latest_version': '2025.1.1', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.test_update', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_update_streaming[updated] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/teslemetry/icon.png', + 'friendly_name': 'Test Update', + 'in_progress': False, + 'installed_version': '2025.2.1', + 'latest_version': '2025.1.1', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.test_update', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/teslemetry/test_button.py b/tests/components/teslemetry/test_button.py index 75f94342f1e..46db33ce913 100644 --- a/tests/components/teslemetry/test_button.py +++ b/tests/components/teslemetry/test_button.py @@ -42,7 +42,7 @@ async def test_press(hass: HomeAssistant, name: str, func: str) -> None: await setup_platform(hass, [Platform.BUTTON]) with patch( - f"homeassistant.components.teslemetry.VehicleSpecific.{func}", + f"tesla_fleet_api.teslemetry.Vehicle.{func}", return_value=COMMAND_OK, ) as command: await hass.services.async_call( diff --git a/tests/components/teslemetry/test_climate.py b/tests/components/teslemetry/test_climate.py index 33f2e134806..27bed45c51f 100644 --- a/tests/components/teslemetry/test_climate.py +++ b/tests/components/teslemetry/test_climate.py @@ -2,10 +2,10 @@ from unittest.mock import AsyncMock, patch -from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion from tesla_fleet_api.exceptions import InvalidCommand +from teslemetry_stream import Signal from homeassistant.components.climate import ( ATTR_HVAC_MODE, @@ -24,15 +24,12 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import entity_registry as er -from . import assert_entities, setup_platform +from . 
import assert_entities, reload_platform, setup_platform from .const import ( COMMAND_ERRORS, COMMAND_IGNORED_REASON, METADATA_NOSCOPE, VEHICLE_DATA_ALT, - VEHICLE_DATA_ASLEEP, - WAKE_UP_ASLEEP, - WAKE_UP_ONLINE, ) @@ -41,6 +38,7 @@ async def test_climate( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, + mock_legacy: AsyncMock, ) -> None: """Tests that the climate entity is correct.""" @@ -195,6 +193,7 @@ async def test_climate_alt( snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, mock_vehicle_data: AsyncMock, + mock_legacy: AsyncMock, ) -> None: """Tests that the climate entity is correct.""" @@ -211,7 +210,7 @@ async def test_invalid_error(hass: HomeAssistant, snapshot: SnapshotAssertion) - with ( patch( - "homeassistant.components.teslemetry.VehicleSpecific.auto_conditioning_start", + "tesla_fleet_api.teslemetry.Vehicle.auto_conditioning_start", side_effect=InvalidCommand, ) as mock_on, pytest.raises(HomeAssistantError) as error, @@ -235,7 +234,7 @@ async def test_errors(hass: HomeAssistant, response: str) -> None: with ( patch( - "homeassistant.components.teslemetry.VehicleSpecific.auto_conditioning_start", + "tesla_fleet_api.teslemetry.Vehicle.auto_conditioning_start", return_value=response, ) as mock_on, pytest.raises(HomeAssistantError), @@ -257,7 +256,7 @@ async def test_ignored_error( await setup_platform(hass, [Platform.CLIMATE]) entity_id = "climate.test_climate" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.auto_conditioning_start", + "tesla_fleet_api.teslemetry.Vehicle.auto_conditioning_start", return_value=COMMAND_IGNORED_REASON, ) as mock_on: await hass.services.async_call( @@ -269,71 +268,12 @@ async def test_ignored_error( mock_on.assert_called_once() -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_asleep_or_offline( - hass: HomeAssistant, - mock_vehicle_data: AsyncMock, - mock_wake_up: AsyncMock, - mock_vehicle: AsyncMock, - freezer: FrozenDateTimeFactory, - snapshot: SnapshotAssertion, -) -> None: - """Tests asleep is handled.""" - - mock_vehicle_data.return_value = VEHICLE_DATA_ASLEEP - await setup_platform(hass, [Platform.CLIMATE]) - entity_id = "climate.test_climate" - - # Run a command but fail trying to wake up the vehicle - mock_wake_up.side_effect = InvalidCommand - with pytest.raises(HomeAssistantError) as error: - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: [entity_id]}, - blocking=True, - ) - assert str(error.value) == snapshot(name="InvalidCommand") - mock_wake_up.assert_called_once() - - mock_wake_up.side_effect = None - mock_wake_up.reset_mock() - - # Run a command but timeout trying to wake up the vehicle - mock_wake_up.return_value = WAKE_UP_ASLEEP - mock_vehicle.return_value = WAKE_UP_ASLEEP - with ( - patch("homeassistant.components.teslemetry.helpers.asyncio.sleep"), - pytest.raises(HomeAssistantError) as error, - ): - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: [entity_id]}, - blocking=True, - ) - assert str(error.value) == snapshot(name="HomeAssistantError") - mock_wake_up.assert_called_once() - mock_vehicle.assert_called() - - mock_wake_up.reset_mock() - mock_vehicle.reset_mock() - mock_wake_up.return_value = WAKE_UP_ONLINE - mock_vehicle.return_value = WAKE_UP_ONLINE - - # Run a command and wake up the vehicle immediately - await hass.services.async_call( - CLIMATE_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: [entity_id]}, blocking=True - ) - await 
hass.async_block_till_done()
-    mock_wake_up.assert_called_once()
-
-
 async def test_climate_noscope(
     hass: HomeAssistant,
     snapshot: SnapshotAssertion,
     entity_registry: er.EntityRegistry,
     mock_metadata: AsyncMock,
+    mock_legacy: AsyncMock,
 ) -> None:
     """Tests that the climate entity is correct."""
     mock_metadata.return_value = METADATA_NOSCOPE
@@ -363,3 +303,47 @@ async def test_climate_noscope(
         {ATTR_ENTITY_ID: [entity_id], ATTR_TEMPERATURE: 20},
         blocking=True,
     )
+
+
+@pytest.mark.usefixtures("entity_registry_enabled_by_default")
+async def test_select_streaming(
+    hass: HomeAssistant,
+    snapshot: SnapshotAssertion,
+    mock_vehicle_data: AsyncMock,
+    mock_add_listener: AsyncMock,
+) -> None:
+    """Tests that the climate entities with streaming are correct."""
+
+    entry = await setup_platform(hass, [Platform.CLIMATE])
+
+    # Stream update
+    mock_add_listener.send(
+        {
+            "vin": VEHICLE_DATA_ALT["response"]["vin"],
+            "data": {
+                Signal.INSIDE_TEMP: 26,
+                Signal.HVAC_AC_ENABLED: True,
+                Signal.CLIMATE_KEEPER_MODE: "ClimateKeeperModeOn",
+                Signal.RIGHT_HAND_DRIVE: True,
+                Signal.HVAC_LEFT_TEMPERATURE_REQUEST: 22,
+                Signal.HVAC_RIGHT_TEMPERATURE_REQUEST: 21,
+                Signal.CABIN_OVERHEAT_PROTECTION_MODE: "CabinOverheatProtectionModeStateOn",
+                Signal.CABIN_OVERHEAT_PROTECTION_TEMPERATURE_LIMIT: 35,
+            },
+            "createdAt": "2024-10-04T10:45:17.537Z",
+        }
+    )
+    await hass.async_block_till_done()
+
+    assert hass.states.get("climate.test_climate") == snapshot(
+        name="climate.test_climate LHD"
+    )
+
+    await reload_platform(hass, entry, [Platform.CLIMATE])
+
+    # Assert the entities restored their values
+    for entity_id in (
+        "climate.test_climate",
+        "climate.test_cabin_overheat_protection",
+    ):
+        assert hass.states.get(entity_id) == snapshot(name=entity_id)
diff --git a/tests/components/teslemetry/test_cover.py b/tests/components/teslemetry/test_cover.py
index 14af1e732fe..e3933931c9f 100644
--- a/tests/components/teslemetry/test_cover.py
+++ b/tests/components/teslemetry/test_cover.py
@@ -75,7 +75,7 @@ async def test_cover_services(
     # Vent Windows
     entity_id = "cover.test_windows"
     with patch(
-        "homeassistant.components.teslemetry.VehicleSpecific.window_control",
+        "tesla_fleet_api.teslemetry.Vehicle.window_control",
         return_value=COMMAND_OK,
     ) as call:
         await hass.services.async_call(
@@ -104,7 +104,7 @@ async def test_cover_services(
     # Charge Port Door
     entity_id = "cover.test_charge_port_door"
     with patch(
-        "homeassistant.components.teslemetry.VehicleSpecific.charge_port_door_open",
+        "tesla_fleet_api.teslemetry.Vehicle.charge_port_door_open",
         return_value=COMMAND_OK,
     ) as call:
         await hass.services.async_call(
@@ -119,7 +119,7 @@ async def test_cover_services(
         assert state.state == CoverState.OPEN

     with patch(
-        "homeassistant.components.teslemetry.VehicleSpecific.charge_port_door_close",
+        "tesla_fleet_api.teslemetry.Vehicle.charge_port_door_close",
         return_value=COMMAND_OK,
     ) as call:
         await hass.services.async_call(
@@ -136,7 +136,7 @@ async def test_cover_services(
     # Frunk
     entity_id = "cover.test_frunk"
     with patch(
-        "homeassistant.components.teslemetry.VehicleSpecific.actuate_trunk",
+        "tesla_fleet_api.teslemetry.Vehicle.actuate_trunk",
         return_value=COMMAND_OK,
     ) as call:
         await hass.services.async_call(
@@ -153,7 +153,7 @@ async def test_cover_services(
     # Trunk
     entity_id = "cover.test_trunk"
     with patch(
-        "homeassistant.components.teslemetry.VehicleSpecific.actuate_trunk",
+        "tesla_fleet_api.teslemetry.Vehicle.actuate_trunk",
         return_value=COMMAND_OK,
     ) as call:
         await hass.services.async_call(
@@ -182,7 +182,7
@@ async def test_cover_services( # Sunroof entity_id = "cover.test_sunroof" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.sun_roof_control", + "tesla_fleet_api.teslemetry.Vehicle.sun_roof_control", return_value=COMMAND_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/teslemetry/test_init.py b/tests/components/teslemetry/test_init.py index 5481e6cc034..fcf9c76c939 100644 --- a/tests/components/teslemetry/test_init.py +++ b/tests/components/teslemetry/test_init.py @@ -2,17 +2,14 @@ from unittest.mock import AsyncMock -from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion from tesla_fleet_api.exceptions import ( InvalidToken, SubscriptionRequired, TeslaFleetError, - VehicleOffline, ) -from homeassistant.components.teslemetry.coordinator import VEHICLE_INTERVAL from homeassistant.components.teslemetry.models import TeslemetryData from homeassistant.config_entries import ConfigEntryState from homeassistant.const import STATE_OFF, STATE_ON, Platform @@ -22,8 +19,6 @@ from homeassistant.helpers import device_registry as dr from . import setup_platform from .const import VEHICLE_DATA_ALT -from tests.common import async_fire_time_changed - ERRORS = [ (InvalidToken, ConfigEntryState.SETUP_ERROR), (SubscriptionRequired, ConfigEntryState.SETUP_ERROR), @@ -69,22 +64,6 @@ async def test_devices( assert device == snapshot(name=f"{device.identifiers}") -async def test_vehicle_refresh_offline( - hass: HomeAssistant, mock_vehicle_data: AsyncMock, freezer: FrozenDateTimeFactory -) -> None: - """Test coordinator refresh with an error.""" - entry = await setup_platform(hass, [Platform.CLIMATE]) - assert entry.state is ConfigEntryState.LOADED - mock_vehicle_data.assert_called_once() - mock_vehicle_data.reset_mock() - - mock_vehicle_data.side_effect = VehicleOffline - freezer.tick(VEHICLE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - mock_vehicle_data.assert_called_once() - - @pytest.mark.parametrize(("side_effect", "state"), ERRORS) async def test_vehicle_refresh_error( hass: HomeAssistant, diff --git a/tests/components/teslemetry/test_lock.py b/tests/components/teslemetry/test_lock.py index 848eee82c39..a74d613859f 100644 --- a/tests/components/teslemetry/test_lock.py +++ b/tests/components/teslemetry/test_lock.py @@ -57,7 +57,7 @@ async def test_lock_services( entity_id = "lock.test_lock" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.door_lock", + "tesla_fleet_api.teslemetry.Vehicle.door_lock", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -71,7 +71,7 @@ async def test_lock_services( call.assert_called_once() with patch( - "homeassistant.components.teslemetry.VehicleSpecific.door_unlock", + "tesla_fleet_api.teslemetry.Vehicle.door_unlock", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -95,7 +95,7 @@ async def test_lock_services( ) with patch( - "homeassistant.components.teslemetry.VehicleSpecific.charge_port_door_open", + "tesla_fleet_api.teslemetry.Vehicle.charge_port_door_open", return_value=COMMAND_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/teslemetry/test_media_player.py b/tests/components/teslemetry/test_media_player.py index ae462bfd026..ab8f21ceda4 100644 --- a/tests/components/teslemetry/test_media_player.py +++ b/tests/components/teslemetry/test_media_player.py @@ -2,7 +2,9 @@ from unittest.mock import AsyncMock, patch +import pytest from syrupy.assertion 
import SnapshotAssertion +from teslemetry_stream import Signal from homeassistant.components.media_player import ( ATTR_MEDIA_VOLUME_LEVEL, @@ -18,7 +20,7 @@ from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from . import assert_entities, assert_entities_alt, setup_platform +from . import assert_entities, assert_entities_alt, reload_platform, setup_platform from .const import COMMAND_OK, METADATA_NOSCOPE, VEHICLE_DATA_ALT @@ -26,6 +28,7 @@ async def test_media_player( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, + mock_legacy: AsyncMock, ) -> None: """Tests that the media player entities are correct.""" @@ -38,6 +41,7 @@ async def test_media_player_alt( snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, mock_vehicle_data: AsyncMock, + mock_legacy: AsyncMock, ) -> None: """Tests that the media player entities are correct.""" @@ -51,6 +55,7 @@ async def test_media_player_noscope( snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, mock_metadata: AsyncMock, + mock_legacy: AsyncMock, ) -> None: """Tests that the media player entities are correct without required scope.""" @@ -62,6 +67,7 @@ async def test_media_player_noscope( async def test_media_player_services( hass: HomeAssistant, snapshot: SnapshotAssertion, + mock_legacy: AsyncMock, ) -> None: """Tests that the media player services work.""" @@ -70,7 +76,7 @@ async def test_media_player_services( entity_id = "media_player.test_media_player" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.adjust_volume", + "tesla_fleet_api.teslemetry.Vehicle.adjust_volume", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -84,7 +90,7 @@ async def test_media_player_services( call.assert_called_once() with patch( - "homeassistant.components.teslemetry.VehicleSpecific.media_toggle_playback", + "tesla_fleet_api.teslemetry.Vehicle.media_toggle_playback", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -99,7 +105,7 @@ async def test_media_player_services( # This test will fail without the previous call to pause playback with patch( - "homeassistant.components.teslemetry.VehicleSpecific.media_toggle_playback", + "tesla_fleet_api.teslemetry.Vehicle.media_toggle_playback", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -113,7 +119,7 @@ async def test_media_player_services( call.assert_called_once() with patch( - "homeassistant.components.teslemetry.VehicleSpecific.media_next_track", + "tesla_fleet_api.teslemetry.Vehicle.media_next_track", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -126,7 +132,7 @@ async def test_media_player_services( call.assert_called_once() with patch( - "homeassistant.components.teslemetry.VehicleSpecific.media_prev_track", + "tesla_fleet_api.teslemetry.Vehicle.media_prev_track", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -137,3 +143,62 @@ async def test_media_player_services( ) state = hass.states.get(entity_id) call.assert_called_once() + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_update_streaming( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_vehicle_data: AsyncMock, + mock_add_listener: AsyncMock, +) -> None: + """Tests that the media player entities with streaming are correct.""" + + entry = await setup_platform(hass, [Platform.MEDIA_PLAYER]) + + # Stream update + 
mock_add_listener.send( + { + "vin": VEHICLE_DATA_ALT["response"]["vin"], + "data": { + Signal.CENTER_DISPLAY: "Off", + Signal.MEDIA_PLAYBACK_STATUS: None, + Signal.MEDIA_PLAYBACK_SOURCE: None, + Signal.MEDIA_AUDIO_VOLUME: None, + Signal.MEDIA_NOW_PLAYING_DURATION: None, + Signal.MEDIA_NOW_PLAYING_ELAPSED: None, + Signal.MEDIA_NOW_PLAYING_ARTIST: None, + Signal.MEDIA_NOW_PLAYING_ALBUM: None, + }, + "createdAt": "2024-10-04T10:45:17.537Z", + } + ) + await hass.async_block_till_done() + state = hass.states.get("media_player.test_media_player") + assert state == snapshot(name="off") + + mock_add_listener.send( + { + "vin": VEHICLE_DATA_ALT["response"]["vin"], + "data": { + Signal.CENTER_DISPLAY: "Driving", + Signal.MEDIA_PLAYBACK_STATUS: "Playing", + Signal.MEDIA_PLAYBACK_SOURCE: "Spotify", + Signal.MEDIA_AUDIO_VOLUME: 2, + Signal.MEDIA_NOW_PLAYING_DURATION: 60000, + Signal.MEDIA_NOW_PLAYING_ELAPSED: 5000, + Signal.MEDIA_NOW_PLAYING_ARTIST: "Test Artist", + Signal.MEDIA_NOW_PLAYING_ALBUM: "Test Album", + }, + "createdAt": "2024-10-04T10:55:17.000Z", + } + ) + await hass.async_block_till_done() + state = hass.states.get("media_player.test_media_player") + assert state == snapshot(name="on") + + await reload_platform(hass, entry, [Platform.MEDIA_PLAYER]) + + # Ensure the restored state is the same as the previous state + state = hass.states.get("media_player.test_media_player") + assert state == snapshot(name="on") diff --git a/tests/components/teslemetry/test_number.py b/tests/components/teslemetry/test_number.py index 95eed5a3f1e..2c45631a060 100644 --- a/tests/components/teslemetry/test_number.py +++ b/tests/components/teslemetry/test_number.py @@ -42,7 +42,7 @@ async def test_number_services( entity_id = "number.test_charge_current" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.set_charging_amps", + "tesla_fleet_api.teslemetry.Vehicle.set_charging_amps", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -57,7 +57,7 @@ async def test_number_services( entity_id = "number.test_charge_limit" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.set_charge_limit", + "tesla_fleet_api.teslemetry.Vehicle.set_charge_limit", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -72,7 +72,7 @@ async def test_number_services( entity_id = "number.energy_site_backup_reserve" with patch( - "homeassistant.components.teslemetry.EnergySpecific.backup", + "tesla_fleet_api.teslemetry.EnergySite.backup", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -90,7 +90,7 @@ async def test_number_services( entity_id = "number.energy_site_off_grid_reserve" with patch( - "homeassistant.components.teslemetry.EnergySpecific.off_grid_vehicle_charging_reserve", + "tesla_fleet_api.teslemetry.EnergySite.off_grid_vehicle_charging_reserve", return_value=COMMAND_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/teslemetry/test_select.py b/tests/components/teslemetry/test_select.py index c49e83803cd..b17b52903fa 100644 --- a/tests/components/teslemetry/test_select.py +++ b/tests/components/teslemetry/test_select.py @@ -41,7 +41,7 @@ async def test_select_services(hass: HomeAssistant, mock_vehicle_data) -> None: entity_id = "select.test_seat_heater_front_left" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.remote_seat_heater_request", + "tesla_fleet_api.teslemetry.Vehicle.remote_seat_heater_request", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -56,7 +56,7 @@ async 
def test_select_services(hass: HomeAssistant, mock_vehicle_data) -> None: entity_id = "select.test_steering_wheel_heater" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.remote_steering_wheel_heat_level_request", + "tesla_fleet_api.teslemetry.Vehicle.remote_steering_wheel_heat_level_request", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -71,7 +71,7 @@ async def test_select_services(hass: HomeAssistant, mock_vehicle_data) -> None: entity_id = "select.energy_site_operation_mode" with patch( - "homeassistant.components.teslemetry.EnergySpecific.operation", + "tesla_fleet_api.teslemetry.EnergySite.operation", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -89,7 +89,7 @@ async def test_select_services(hass: HomeAssistant, mock_vehicle_data) -> None: entity_id = "select.energy_site_allow_export" with patch( - "homeassistant.components.teslemetry.EnergySpecific.grid_import_export", + "tesla_fleet_api.teslemetry.EnergySite.grid_import_export", return_value=COMMAND_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/teslemetry/test_sensor.py b/tests/components/teslemetry/test_sensor.py index a488ebc8a06..213811f6ea0 100644 --- a/tests/components/teslemetry/test_sensor.py +++ b/tests/components/teslemetry/test_sensor.py @@ -31,9 +31,7 @@ async def test_sensors( freezer.move_to("2024-01-01 00:00:00+00:00") # Force the vehicle to use polling - with patch( - "homeassistant.components.teslemetry.VehicleSpecific.pre2021", return_value=True - ): + with patch("tesla_fleet_api.teslemetry.Vehicle.pre2021", return_value=True): entry = await setup_platform(hass, [Platform.SENSOR]) assert_entities(hass, entry.entry_id, entity_registry, snapshot) @@ -72,7 +70,7 @@ async def test_sensors_streaming( Signal.AC_CHARGING_ENERGY_IN: 10, Signal.AC_CHARGING_POWER: 2, Signal.CHARGING_CABLE_TYPE: None, - Signal.TIME_TO_FULL_CHARGE: 10, + Signal.TIME_TO_FULL_CHARGE: 0.166666667, Signal.MINUTES_TO_ARRIVAL: None, }, "createdAt": "2024-10-04T10:45:17.537Z", diff --git a/tests/components/teslemetry/test_services.py b/tests/components/teslemetry/test_services.py index a5b55f5dcc5..bcf5407999f 100644 --- a/tests/components/teslemetry/test_services.py +++ b/tests/components/teslemetry/test_services.py @@ -51,7 +51,7 @@ async def test_services( ).device_id with patch( - "homeassistant.components.teslemetry.VehicleSpecific.navigation_gps_request", + "tesla_fleet_api.teslemetry.Vehicle.navigation_gps_request", return_value=COMMAND_OK, ) as navigation_gps_request: await hass.services.async_call( @@ -66,7 +66,7 @@ async def test_services( navigation_gps_request.assert_called_once() with patch( - "homeassistant.components.teslemetry.VehicleSpecific.set_scheduled_charging", + "tesla_fleet_api.teslemetry.Vehicle.set_scheduled_charging", return_value=COMMAND_OK, ) as set_scheduled_charging: await hass.services.async_call( @@ -93,7 +93,7 @@ async def test_services( ) with patch( - "homeassistant.components.teslemetry.VehicleSpecific.set_scheduled_departure", + "tesla_fleet_api.teslemetry.Vehicle.set_scheduled_departure", return_value=COMMAND_OK, ) as set_scheduled_departure: await hass.services.async_call( @@ -138,7 +138,7 @@ async def test_services( ) with patch( - "homeassistant.components.teslemetry.VehicleSpecific.set_valet_mode", + "tesla_fleet_api.teslemetry.Vehicle.set_valet_mode", return_value=COMMAND_OK, ) as set_valet_mode: await hass.services.async_call( @@ -154,7 +154,7 @@ async def test_services( set_valet_mode.assert_called_once() 
with patch( - "homeassistant.components.teslemetry.VehicleSpecific.speed_limit_activate", + "tesla_fleet_api.teslemetry.Vehicle.speed_limit_activate", return_value=COMMAND_OK, ) as speed_limit_activate: await hass.services.async_call( @@ -170,7 +170,7 @@ async def test_services( speed_limit_activate.assert_called_once() with patch( - "homeassistant.components.teslemetry.VehicleSpecific.speed_limit_deactivate", + "tesla_fleet_api.teslemetry.Vehicle.speed_limit_deactivate", return_value=COMMAND_OK, ) as speed_limit_deactivate: await hass.services.async_call( @@ -186,7 +186,7 @@ async def test_services( speed_limit_deactivate.assert_called_once() with patch( - "homeassistant.components.teslemetry.EnergySpecific.time_of_use_settings", + "tesla_fleet_api.teslemetry.EnergySite.time_of_use_settings", return_value=COMMAND_OK, ) as set_time_of_use: await hass.services.async_call( @@ -202,7 +202,7 @@ async def test_services( with ( patch( - "homeassistant.components.teslemetry.EnergySpecific.time_of_use_settings", + "tesla_fleet_api.teslemetry.EnergySite.time_of_use_settings", return_value=COMMAND_ERROR, ) as set_time_of_use, pytest.raises(HomeAssistantError), diff --git a/tests/components/teslemetry/test_switch.py b/tests/components/teslemetry/test_switch.py index 6a1ddb430ce..6b31a28db59 100644 --- a/tests/components/teslemetry/test_switch.py +++ b/tests/components/teslemetry/test_switch.py @@ -4,6 +4,7 @@ from unittest.mock import AsyncMock, patch import pytest from syrupy.assertion import SnapshotAssertion +from teslemetry_stream import Signal from homeassistant.components.switch import ( DOMAIN as SWITCH_DOMAIN, @@ -14,7 +15,7 @@ from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from . import assert_entities, assert_entities_alt, setup_platform +from . 
import assert_entities, assert_entities_alt, reload_platform, setup_platform from .const import COMMAND_OK, VEHICLE_DATA_ALT @@ -22,6 +23,7 @@ async def test_switch( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, + mock_legacy: AsyncMock, ) -> None: """Tests that the switch entities are correct.""" @@ -34,6 +36,7 @@ async def test_switch_alt( snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, mock_vehicle_data: AsyncMock, + mock_legacy: AsyncMock, ) -> None: """Tests that the switch entities are correct.""" @@ -46,41 +49,41 @@ async def test_switch_alt( @pytest.mark.parametrize( ("name", "on", "off"), [ - ("test_charge", "VehicleSpecific.charge_start", "VehicleSpecific.charge_stop"), + ("test_charge", "Vehicle.charge_start", "Vehicle.charge_stop"), ( "test_auto_seat_climate_left", - "VehicleSpecific.remote_auto_seat_climate_request", - "VehicleSpecific.remote_auto_seat_climate_request", + "Vehicle.remote_auto_seat_climate_request", + "Vehicle.remote_auto_seat_climate_request", ), ( "test_auto_seat_climate_right", - "VehicleSpecific.remote_auto_seat_climate_request", - "VehicleSpecific.remote_auto_seat_climate_request", + "Vehicle.remote_auto_seat_climate_request", + "Vehicle.remote_auto_seat_climate_request", ), ( "test_auto_steering_wheel_heater", - "VehicleSpecific.remote_auto_steering_wheel_heat_climate_request", - "VehicleSpecific.remote_auto_steering_wheel_heat_climate_request", + "Vehicle.remote_auto_steering_wheel_heat_climate_request", + "Vehicle.remote_auto_steering_wheel_heat_climate_request", ), ( "test_defrost", - "VehicleSpecific.set_preconditioning_max", - "VehicleSpecific.set_preconditioning_max", + "Vehicle.set_preconditioning_max", + "Vehicle.set_preconditioning_max", ), ( "energy_site_storm_watch", - "EnergySpecific.storm_mode", - "EnergySpecific.storm_mode", + "EnergySite.storm_mode", + "EnergySite.storm_mode", ), ( "energy_site_allow_charging_from_grid", - "EnergySpecific.grid_import_export", - "EnergySpecific.grid_import_export", + "EnergySite.grid_import_export", + "EnergySite.grid_import_export", ), ( "test_sentry_mode", - "VehicleSpecific.set_sentry_mode", - "VehicleSpecific.set_sentry_mode", + "Vehicle.set_sentry_mode", + "Vehicle.set_sentry_mode", ), ], ) @@ -93,7 +96,7 @@ async def test_switch_services( entity_id = f"switch.{name}" with patch( - f"homeassistant.components.teslemetry.{on}", + f"tesla_fleet_api.teslemetry.{on}", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -107,7 +110,7 @@ async def test_switch_services( call.assert_called_once() with patch( - f"homeassistant.components.teslemetry.{off}", + f"tesla_fleet_api.teslemetry.{off}", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -119,3 +122,47 @@ async def test_switch_services( state = hass.states.get(entity_id) assert state.state == STATE_OFF call.assert_called_once() + + +async def test_switch_streaming( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_vehicle_data: AsyncMock, + mock_add_listener: AsyncMock, +) -> None: + """Tests that the switch entities with streaming are correct.""" + + entry = await setup_platform(hass, [Platform.SWITCH]) + + # Stream update + mock_add_listener.send( + { + "vin": VEHICLE_DATA_ALT["response"]["vin"], + "data": { + Signal.SENTRY_MODE: "SentryModeStateIdle", + Signal.AUTO_SEAT_CLIMATE_LEFT: True, + Signal.AUTO_SEAT_CLIMATE_RIGHT: False, + Signal.HVAC_STEERING_WHEEL_HEAT_AUTO: True, + Signal.DEFROST_MODE: 
"DefrostModeStateOff", + Signal.DETAILED_CHARGE_STATE: "DetailedChargeStateCharging", + }, + "createdAt": "2024-10-04T10:45:17.537Z", + } + ) + await hass.async_block_till_done() + + # Reload the entry + await reload_platform(hass, entry, [Platform.SWITCH]) + + # Assert the entities restored their values + for entity_id in ( + "switch.test_sentry_mode", + "switch.test_auto_seat_climate_left", + "switch.test_auto_seat_climate_right", + "switch.test_auto_steering_wheel_heater", + "switch.test_defrost", + "switch.test_charge", + ): + state = hass.states.get(entity_id) + assert state.state == snapshot(name=entity_id) diff --git a/tests/components/teslemetry/test_update.py b/tests/components/teslemetry/test_update.py index 448f31afd67..af6c9d847f1 100644 --- a/tests/components/teslemetry/test_update.py +++ b/tests/components/teslemetry/test_update.py @@ -4,7 +4,9 @@ import copy from unittest.mock import AsyncMock, patch from freezegun.api import FrozenDateTimeFactory +import pytest from syrupy.assertion import SnapshotAssertion +from teslemetry_stream import Signal from homeassistant.components.teslemetry.coordinator import VEHICLE_INTERVAL from homeassistant.components.teslemetry.update import INSTALLING @@ -13,7 +15,7 @@ from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from . import assert_entities, setup_platform +from . import assert_entities, reload_platform, setup_platform from .const import COMMAND_OK, VEHICLE_DATA, VEHICLE_DATA_ALT from tests.common import async_fire_time_changed @@ -23,6 +25,7 @@ async def test_update( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, + mock_legacy: AsyncMock, ) -> None: """Tests that the update entities are correct.""" @@ -35,6 +38,7 @@ async def test_update_alt( snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, mock_vehicle_data: AsyncMock, + mock_legacy: AsyncMock, ) -> None: """Tests that the update entities are correct.""" @@ -48,6 +52,7 @@ async def test_update_services( mock_vehicle_data: AsyncMock, freezer: FrozenDateTimeFactory, snapshot: SnapshotAssertion, + mock_legacy: AsyncMock, ) -> None: """Tests that the update services work.""" @@ -56,7 +61,7 @@ async def test_update_services( entity_id = "update.test_update" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.schedule_software_update", + "tesla_fleet_api.teslemetry.Vehicle.schedule_software_update", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -78,3 +83,90 @@ async def test_update_services( state = hass.states.get(entity_id) assert state.attributes["in_progress"] == 1 + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_update_streaming( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_vehicle_data: AsyncMock, + mock_add_listener: AsyncMock, +) -> None: + """Tests that the select entities with streaming are correct.""" + + entry = await setup_platform(hass, [Platform.UPDATE]) + + # Stream update + mock_add_listener.send( + { + "vin": VEHICLE_DATA_ALT["response"]["vin"], + "data": { + Signal.SOFTWARE_UPDATE_DOWNLOAD_PERCENT_COMPLETE: 50, + Signal.SOFTWARE_UPDATE_INSTALLATION_PERCENT_COMPLETE: None, + Signal.SOFTWARE_UPDATE_SCHEDULED_START_TIME: None, + Signal.SOFTWARE_UPDATE_VERSION: "2025.2.1", + Signal.VERSION: "2025.1.1", + }, + "createdAt": "2024-10-04T10:45:17.537Z", + } + ) + await hass.async_block_till_done() + + state = 
hass.states.get("update.test_update") + assert state == snapshot(name="downloading") + + mock_add_listener.send( + { + "vin": VEHICLE_DATA_ALT["response"]["vin"], + "data": { + Signal.SOFTWARE_UPDATE_DOWNLOAD_PERCENT_COMPLETE: 100, + Signal.SOFTWARE_UPDATE_INSTALLATION_PERCENT_COMPLETE: 1, + Signal.SOFTWARE_UPDATE_SCHEDULED_START_TIME: None, + Signal.SOFTWARE_UPDATE_VERSION: "2025.2.1", + Signal.VERSION: "2025.1.1", + }, + "createdAt": "2024-10-04T10:45:17.537Z", + } + ) + await hass.async_block_till_done() + state = hass.states.get("update.test_update") + assert state == snapshot(name="ready") + + mock_add_listener.send( + { + "vin": VEHICLE_DATA_ALT["response"]["vin"], + "data": { + Signal.SOFTWARE_UPDATE_DOWNLOAD_PERCENT_COMPLETE: 100, + Signal.SOFTWARE_UPDATE_INSTALLATION_PERCENT_COMPLETE: 50, + Signal.SOFTWARE_UPDATE_SCHEDULED_START_TIME: None, + Signal.SOFTWARE_UPDATE_VERSION: "2025.2.1", + Signal.VERSION: "2025.1.1", + }, + "createdAt": "2024-10-04T10:45:17.537Z", + } + ) + await hass.async_block_till_done() + state = hass.states.get("update.test_update") + assert state == snapshot(name="installing") + + mock_add_listener.send( + { + "vin": VEHICLE_DATA_ALT["response"]["vin"], + "data": { + Signal.SOFTWARE_UPDATE_DOWNLOAD_PERCENT_COMPLETE: None, + Signal.SOFTWARE_UPDATE_INSTALLATION_PERCENT_COMPLETE: None, + Signal.SOFTWARE_UPDATE_SCHEDULED_START_TIME: None, + Signal.SOFTWARE_UPDATE_VERSION: "", + Signal.VERSION: "2025.2.1", + }, + "createdAt": "2024-10-04T10:45:17.537Z", + } + ) + await hass.async_block_till_done() + state = hass.states.get("update.test_update") + assert state == snapshot(name="updated") + + await reload_platform(hass, entry, [Platform.UPDATE]) + + state = hass.states.get("update.test_update") + assert state == snapshot(name="restored") diff --git a/tests/components/tessie/conftest.py b/tests/components/tessie/conftest.py index e0aba73af17..5fb844ff6b4 100644 --- a/tests/components/tessie/conftest.py +++ b/tests/components/tessie/conftest.py @@ -85,7 +85,7 @@ def mock_request(): def mock_live_status(): """Mock Tesla Fleet API EnergySpecific live_status method.""" with patch( - "homeassistant.components.tessie.EnergySpecific.live_status", + "tesla_fleet_api.tessie.EnergySite.live_status", side_effect=lambda: deepcopy(LIVE_STATUS), ) as mock_live_status: yield mock_live_status @@ -95,7 +95,7 @@ def mock_live_status(): def mock_site_info(): """Mock Tesla Fleet API EnergySpecific site_info method.""" with patch( - "homeassistant.components.tessie.EnergySpecific.site_info", + "tesla_fleet_api.tessie.EnergySite.site_info", side_effect=lambda: deepcopy(SITE_INFO), ) as mock_live_status: yield mock_live_status diff --git a/tests/components/tessie/test_number.py b/tests/components/tessie/test_number.py index 0fb13779183..69bbe1c9087 100644 --- a/tests/components/tessie/test_number.py +++ b/tests/components/tessie/test_number.py @@ -67,7 +67,7 @@ async def test_numbers( entity_id = "number.energy_site_backup_reserve" with patch( - "homeassistant.components.teslemetry.EnergySpecific.backup", + "tesla_fleet_api.tessie.EnergySite.backup", return_value=TEST_RESPONSE, ) as call: await hass.services.async_call( @@ -85,7 +85,7 @@ async def test_numbers( entity_id = "number.energy_site_off_grid_reserve" with patch( - "homeassistant.components.teslemetry.EnergySpecific.off_grid_vehicle_charging_reserve", + "tesla_fleet_api.tessie.EnergySite.off_grid_vehicle_charging_reserve", return_value=TEST_RESPONSE, ) as call: await hass.services.async_call( diff --git 
a/tests/components/tessie/test_select.py b/tests/components/tessie/test_select.py index c78923fbf5b..64380d363fc 100644 --- a/tests/components/tessie/test_select.py +++ b/tests/components/tessie/test_select.py @@ -52,7 +52,7 @@ async def test_select( # Test site operation mode entity_id = "select.energy_site_operation_mode" with patch( - "homeassistant.components.teslemetry.EnergySpecific.operation", + "tesla_fleet_api.tessie.EnergySite.operation", return_value=TEST_RESPONSE, ) as call: await hass.services.async_call( @@ -71,7 +71,7 @@ async def test_select( # Test site export mode entity_id = "select.energy_site_allow_export" with patch( - "homeassistant.components.teslemetry.EnergySpecific.grid_import_export", + "tesla_fleet_api.tessie.EnergySite.grid_import_export", return_value=TEST_RESPONSE, ) as call: await hass.services.async_call( @@ -129,7 +129,7 @@ async def test_errors(hass: HomeAssistant) -> None: # Test changing energy select with unknown error with ( patch( - "homeassistant.components.tessie.EnergySpecific.operation", + "tesla_fleet_api.tessie.EnergySite.operation", side_effect=UnsupportedVehicle, ) as mock_set, pytest.raises(HomeAssistantError) as error, diff --git a/tests/components/tessie/test_switch.py b/tests/components/tessie/test_switch.py index 690ad7d1ab4..f58468edfb7 100644 --- a/tests/components/tessie/test_switch.py +++ b/tests/components/tessie/test_switch.py @@ -61,13 +61,13 @@ async def test_switches( [ ( "energy_site_storm_watch", - "EnergySpecific.storm_mode", - "EnergySpecific.storm_mode", + "storm_mode", + "storm_mode", ), ( "energy_site_allow_charging_from_grid", - "EnergySpecific.grid_import_export", - "EnergySpecific.grid_import_export", + "grid_import_export", + "grid_import_export", ), ], ) @@ -80,7 +80,7 @@ async def test_switch_services( entity_id = f"switch.{name}" with patch( - f"homeassistant.components.teslemetry.{on}", + f"tesla_fleet_api.tessie.EnergySite.{on}", return_value=RESPONSE_OK, ) as call: await hass.services.async_call( @@ -94,7 +94,7 @@ async def test_switch_services( call.assert_called_once() with patch( - f"homeassistant.components.teslemetry.{off}", + f"tesla_fleet_api.tessie.EnergySite.{off}", return_value=RESPONSE_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/totalconnect/test_button.py b/tests/components/totalconnect/test_button.py index 80de004be1d..87764e55186 100644 --- a/tests/components/totalconnect/test_button.py +++ b/tests/components/totalconnect/test_button.py @@ -11,12 +11,7 @@ from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import ( - RESPONSE_ZONE_BYPASS_FAILURE, - RESPONSE_ZONE_BYPASS_SUCCESS, - TOTALCONNECT_REQUEST, - setup_platform, -) +from .common import setup_platform from tests.common import snapshot_platform @@ -34,12 +29,23 @@ async def test_entity_registry( await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) -@pytest.mark.parametrize("entity_id", [ZONE_BYPASS_ID, PANEL_BYPASS_ID]) -async def test_bypass_button(hass: HomeAssistant, entity_id: str) -> None: +@pytest.mark.parametrize( + ("entity_id", "tcc_request"), + [ + (ZONE_BYPASS_ID, "total_connect_client.zone.TotalConnectZone.bypass"), + ( + PANEL_BYPASS_ID, + "total_connect_client.location.TotalConnectLocation.zone_bypass_all", + ), + ], +) +async def test_bypass_button( + hass: HomeAssistant, entity_id: str, tcc_request: str +) -> None: """Test pushing a bypass button.""" - responses = 
[RESPONSE_ZONE_BYPASS_FAILURE, RESPONSE_ZONE_BYPASS_SUCCESS] + responses = [FailedToBypassZone, None] await setup_platform(hass, BUTTON) - with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: + with patch(tcc_request, side_effect=responses) as mock_request: # try to bypass, but fails with pytest.raises(FailedToBypassZone): await hass.services.async_call( diff --git a/tests/components/totalconnect/test_config_flow.py b/tests/components/totalconnect/test_config_flow.py index f5020394bce..b7ac42c84b5 100644 --- a/tests/components/totalconnect/test_config_flow.py +++ b/tests/components/totalconnect/test_config_flow.py @@ -28,6 +28,7 @@ from .common import ( TOTALCONNECT_REQUEST, TOTALCONNECT_REQUEST_TOKEN, USERNAME, + init_integration, ) from tests.common import MockConfigEntry @@ -219,42 +220,19 @@ async def test_no_locations(hass: HomeAssistant) -> None: async def test_options_flow(hass: HomeAssistant) -> None: """Test config flow options.""" - config_entry = MockConfigEntry( - domain=DOMAIN, - data=CONFIG_DATA, - unique_id=USERNAME, + config_entry = await init_integration(hass) + result = await hass.config_entries.options.async_init(config_entry.entry_id) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], user_input={AUTO_BYPASS: True, CODE_REQUIRED: False} ) - config_entry.add_to_hass(hass) - responses = [ - RESPONSE_SESSION_DETAILS, - RESPONSE_PARTITION_DETAILS, - RESPONSE_GET_ZONE_DETAILS_SUCCESS, - RESPONSE_DISARMED, - RESPONSE_DISARMED, - RESPONSE_DISARMED, - ] + assert result["type"] is FlowResultType.CREATE_ENTRY + assert config_entry.options == {AUTO_BYPASS: True, CODE_REQUIRED: False} + await hass.async_block_till_done() - with ( - patch(TOTALCONNECT_REQUEST, side_effect=responses), - patch(TOTALCONNECT_GET_CONFIG, side_effect=None), - patch(TOTALCONNECT_REQUEST_TOKEN, side_effect=None), - ): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - result = await hass.config_entries.options.async_init(config_entry.entry_id) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "init" - - result = await hass.config_entries.options.async_configure( - result["flow_id"], user_input={AUTO_BYPASS: True, CODE_REQUIRED: False} - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert config_entry.options == {AUTO_BYPASS: True, CODE_REQUIRED: False} - await hass.async_block_till_done() - - assert await hass.config_entries.async_unload(config_entry.entry_id) - await hass.async_block_till_done() + assert await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/tractive/test_device_tracker.py b/tests/components/tractive/test_device_tracker.py index ff78173ef7b..ff9c7ca88ef 100644 --- a/tests/components/tractive/test_device_tracker.py +++ b/tests/components/tractive/test_device_tracker.py @@ -59,3 +59,31 @@ async def test_source_type_phone( hass.states.get("device_tracker.test_pet_tracker").attributes["source_type"] is SourceType.BLUETOOTH ) + + +async def test_source_type_gps( + hass: HomeAssistant, + mock_tractive_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test if the source type is GPS when the location sensor is KNOWN WIFI.""" + await init_integration(hass, mock_config_entry) + + mock_tractive_client.send_position_event( + mock_config_entry, + { + "tracker_id": 
"device_id_123", + "position": { + "latlong": [22.333, 44.555], + "accuracy": 99, + "sensor_used": "KNOWN_WIFI", + }, + }, + ) + mock_tractive_client.send_hardware_event(mock_config_entry) + await hass.async_block_till_done() + + assert ( + hass.states.get("device_tracker.test_pet_tracker").attributes["source_type"] + is SourceType.GPS + ) diff --git a/tests/components/tts/common.py b/tests/components/tts/common.py index 921cab4cba2..99c698771f7 100644 --- a/tests/components/tts/common.py +++ b/tests/components/tts/common.py @@ -14,9 +14,11 @@ import voluptuous as vol from homeassistant.components import media_source from homeassistant.components.tts import ( CONF_LANG, + DATA_TTS_MANAGER, DOMAIN as TTS_DOMAIN, PLATFORM_SCHEMA as TTS_PLATFORM_SCHEMA, Provider, + ResultStream, TextToSpeechEntity, TtsAudioType, Voice, @@ -263,3 +265,33 @@ async def mock_config_entry_setup( await hass.async_block_till_done() return config_entry + + +class MockResultStream(ResultStream): + """Mock result stream.""" + + test_set_message: str | None = None + + def __init__(self, hass: HomeAssistant, extension: str, data: bytes) -> None: + """Initialize the result stream.""" + super().__init__( + token="test-token", + extension=extension, + content_type=f"audio/mock-{extension}", + engine="test-engine", + use_file_cache=True, + language="en", + options={}, + _manager=hass.data[DATA_TTS_MANAGER], + ) + hass.data[DATA_TTS_MANAGER].token_to_stream[self.token] = self + self._mock_data = data + + @callback + def async_set_message(self, message: str) -> None: + """Set message to be generated.""" + self.test_set_message = message + + async def async_stream_result(self): + """Stream the result.""" + yield self._mock_data diff --git a/tests/components/tts/test_init.py b/tests/components/tts/test_init.py index 1b9692cc70c..4e17bc68a5e 100644 --- a/tests/components/tts/test_init.py +++ b/tests/components/tts/test_init.py @@ -28,6 +28,7 @@ from homeassistant.util import dt as dt_util from .common import ( DEFAULT_LANG, TEST_DOMAIN, + MockResultStream, MockTTS, MockTTSEntity, MockTTSProvider, @@ -167,7 +168,7 @@ async def test_service( assert await get_media_source_url( hass, calls[0].data[ATTR_MEDIA_CONTENT_ID] ) == ("/api/tts_proxy/test_token.mp3") - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert ( mock_tts_cache_dir / f"42f18378fd4393d18c8dd11d03fa9563c1e54491_en-us_-_{expected_url_suffix}.mp3" @@ -229,7 +230,7 @@ async def test_service_default_language( assert await get_media_source_url( hass, calls[0].data[ATTR_MEDIA_CONTENT_ID] ) == ("/api/tts_proxy/test_token.mp3") - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert ( mock_tts_cache_dir / ( @@ -293,7 +294,7 @@ async def test_service_default_special_language( assert await get_media_source_url( hass, calls[0].data[ATTR_MEDIA_CONTENT_ID] ) == ("/api/tts_proxy/test_token.mp3") - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert ( mock_tts_cache_dir / f"42f18378fd4393d18c8dd11d03fa9563c1e54491_en-us_-_{expected_url_suffix}.mp3" @@ -353,7 +354,7 @@ async def test_service_language( assert await get_media_source_url( hass, calls[0].data[ATTR_MEDIA_CONTENT_ID] ) == ("/api/tts_proxy/test_token.mp3") - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert ( mock_tts_cache_dir / f"42f18378fd4393d18c8dd11d03fa9563c1e54491_de-de_-_{expected_url_suffix}.mp3" @@ -469,7 
+470,7 @@ async def test_service_options( assert await get_media_source_url( hass, calls[0].data[ATTR_MEDIA_CONTENT_ID] ) == ("/api/tts_proxy/test_token.mp3") - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert ( mock_tts_cache_dir / ( @@ -553,7 +554,7 @@ async def test_service_default_options( assert await get_media_source_url( hass, calls[0].data[ATTR_MEDIA_CONTENT_ID] ) == ("/api/tts_proxy/test_token.mp3") - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert ( mock_tts_cache_dir / ( @@ -627,7 +628,7 @@ async def test_merge_default_service_options( assert await get_media_source_url( hass, calls[0].data[ATTR_MEDIA_CONTENT_ID] ) == ("/api/tts_proxy/test_token.mp3") - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert ( mock_tts_cache_dir / ( @@ -742,7 +743,7 @@ async def test_service_clear_cache( # To make sure the file is persisted assert len(calls) == 1 await get_media_source_url(hass, calls[0].data[ATTR_MEDIA_CONTENT_ID]) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert ( mock_tts_cache_dir / f"42f18378fd4393d18c8dd11d03fa9563c1e54491_en-us_-_{expected_url_suffix}.mp3" @@ -1768,9 +1769,15 @@ async def test_async_convert_audio_error(hass: HomeAssistant) -> None: """Test that ffmpeg failing during audio conversion will raise an error.""" assert await async_setup_component(hass, ffmpeg.DOMAIN, {}) + async def bad_data_gen(): + yield bytes(0) + with pytest.raises(RuntimeError): # Simulate a bad WAV file - await tts.async_convert_audio(hass, "wav", bytes(0), "mp3") + async for _chunk in tts._async_convert_audio( + hass, "wav", bad_data_gen(), "mp3" + ): + pass async def test_default_engine_prefer_entity( @@ -1829,3 +1836,102 @@ async def test_default_engine_prefer_cloud_entity( provider_engine = tts.async_resolve_engine(hass, "test") assert provider_engine == "test" assert tts.async_default_engine(hass) == "tts.cloud_tts_entity" + + +async def test_stream(hass: HomeAssistant, mock_tts_entity: MockTTSEntity) -> None: + """Test creating streams.""" + await mock_config_entry_setup(hass, mock_tts_entity) + stream = tts.async_create_stream(hass, mock_tts_entity.entity_id) + assert stream.language == mock_tts_entity.default_language + assert stream.options == (mock_tts_entity.default_options or {}) + assert tts.async_get_stream(hass, stream.token) is stream + + data = b"beer" + stream2 = MockResultStream(hass, "wav", data) + assert tts.async_get_stream(hass, stream2.token) is stream2 + assert stream2.extension == "wav" + result_data = b"".join([chunk async for chunk in stream2.async_stream_result()]) + assert result_data == data + + +async def test_tts_cache() -> None: + """Test TTSCache.""" + + async def data_gen(queue: asyncio.Queue[bytes | None | Exception]): + while chunk := await queue.get(): + if isinstance(chunk, Exception): + raise chunk + yield chunk + + queue = asyncio.Queue() + cache = tts.TTSCache("test-key", "mp3", data_gen(queue)) + assert cache.cache_key == "test-key" + assert cache.extension == "mp3" + + for i in range(10): + queue.put_nowait(f"{i}".encode()) + queue.put_nowait(None) + + assert await cache.async_load_data() == b"0123456789" + + with pytest.raises(RuntimeError): + await cache.async_load_data() + + # When data is loaded, we get it all in 1 chunk + cur = 0 + async for chunk in cache.async_stream_data(): + assert chunk == b"0123456789" + cur += 1 + 
assert cur == 1 + + # Show we can stream the data while it's still being generated + async def consume_cache(cache: tts.TTSCache): + return b"".join([chunk async for chunk in cache.async_stream_data()]) + + queue = asyncio.Queue() + cache = tts.TTSCache("test-key", "mp3", data_gen(queue)) + + load_data_task = asyncio.create_task(cache.async_load_data()) + consume_pre_data_loaded_task = asyncio.create_task(consume_cache(cache)) + queue.put_nowait(b"0") + await asyncio.sleep(0) + queue.put_nowait(b"1") + await asyncio.sleep(0) + consume_mid_data_task = asyncio.create_task(consume_cache(cache)) + queue.put_nowait(b"2") + await asyncio.sleep(0) + queue.put_nowait(None) + consume_post_data_loaded_task = asyncio.create_task(consume_cache(cache)) + await asyncio.sleep(0) + assert await load_data_task == b"012" + assert await consume_post_data_loaded_task == b"012" + assert await consume_mid_data_task == b"012" + assert await consume_pre_data_loaded_task == b"012" + + # Now with errors + async def consume_cache(cache: tts.TTSCache): + return b"".join([chunk async for chunk in cache.async_stream_data()]) + + queue = asyncio.Queue() + cache = tts.TTSCache("test-key", "mp3", data_gen(queue)) + + load_data_task = asyncio.create_task(cache.async_load_data()) + consume_pre_data_loaded_task = asyncio.create_task(consume_cache(cache)) + queue.put_nowait(b"0") + await asyncio.sleep(0) + queue.put_nowait(b"1") + await asyncio.sleep(0) + consume_mid_data_task = asyncio.create_task(consume_cache(cache)) + queue.put_nowait(ValueError("Boom!")) + await asyncio.sleep(0) + queue.put_nowait(None) + consume_post_data_loaded_task = asyncio.create_task(consume_cache(cache)) + await asyncio.sleep(0) + with pytest.raises(ValueError): + assert await load_data_task == b"012" + with pytest.raises(ValueError): + assert await consume_post_data_loaded_task == b"012" + with pytest.raises(ValueError): + assert await consume_mid_data_task == b"012" + with pytest.raises(ValueError): + assert await consume_pre_data_loaded_task == b"012" diff --git a/tests/components/velbus/test_config_flow.py b/tests/components/velbus/test_config_flow.py index ee714624b45..36d658f9633 100644 --- a/tests/components/velbus/test_config_flow.py +++ b/tests/components/velbus/test_config_flow.py @@ -59,43 +59,30 @@ def mock_controller_connection_failed(): @pytest.mark.usefixtures("controller") -async def test_user_network_succes(hass: HomeAssistant) -> None: - """Test user network config.""" - # inttial menu show - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result - assert result.get("flow_id") - assert result.get("type") is FlowResultType.MENU - assert result.get("step_id") == "user" - assert result.get("menu_options") == ["network", "usbselect"] - # select the network option - result = await hass.config_entries.flow.async_configure( - result.get("flow_id"), - {"next_step_id": "network"}, - ) - assert result.get("type") is FlowResultType.FORM - # fill in the network form - result = await hass.config_entries.flow.async_configure( - result.get("flow_id"), - { - CONF_TLS: False, - CONF_HOST: "velbus", - CONF_PORT: 6000, - CONF_PASSWORD: "", - }, - ) - assert result - assert result.get("type") is FlowResultType.CREATE_ENTRY - assert result.get("title") == "Velbus Network" - data = result.get("data") - assert data - assert data[CONF_PORT] == "velbus:6000" - - -@pytest.mark.usefixtures("controller") -async def test_user_network_succes_tls(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( 
+ ("inputParams", "expected"), + [ + ( + { + CONF_TLS: True, + CONF_PASSWORD: "password", + }, + "tls://password@velbus:6000", + ), + ( + { + CONF_TLS: True, + CONF_PASSWORD: "", + }, + "tls://velbus:6000", + ), + ({CONF_TLS: True}, "tls://velbus:6000"), + ({CONF_TLS: False}, "velbus:6000"), + ], +) +async def test_user_network_succes( + hass: HomeAssistant, inputParams: str, expected: str +) -> None: """Test user network config.""" # inttial menu show result = await hass.config_entries.flow.async_init( @@ -116,10 +103,9 @@ async def test_user_network_succes_tls(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_configure( result.get("flow_id"), { - CONF_TLS: True, CONF_HOST: "velbus", CONF_PORT: 6000, - CONF_PASSWORD: "password", + **inputParams, }, ) assert result @@ -127,7 +113,7 @@ async def test_user_network_succes_tls(hass: HomeAssistant) -> None: assert result.get("title") == "Velbus Network" data = result.get("data") assert data - assert data[CONF_PORT] == "tls://password@velbus:6000" + assert data[CONF_PORT] == expected @pytest.mark.usefixtures("controller") diff --git a/tests/components/vesync/test_select.py b/tests/components/vesync/test_select.py index 30c83c89e0e..c96d687dfd2 100644 --- a/tests/components/vesync/test_select.py +++ b/tests/components/vesync/test_select.py @@ -7,8 +7,10 @@ from homeassistant.components.select import ( DOMAIN as SELECT_DOMAIN, SERVICE_SELECT_OPTION, ) -from homeassistant.components.vesync.const import NIGHT_LIGHT_LEVEL_DIM -from homeassistant.components.vesync.select import HA_TO_VS_NIGHT_LIGHT_LEVEL_MAP +from homeassistant.components.vesync.const import HUMIDIFIER_NIGHT_LIGHT_LEVEL_DIM +from homeassistant.components.vesync.select import ( + HA_TO_VS_HUMIDIFIER_NIGHT_LIGHT_LEVEL_MAP, +) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant @@ -18,24 +20,24 @@ from .common import ENTITY_HUMIDIFIER_300S_NIGHT_LIGHT_SELECT @pytest.mark.parametrize( "install_humidifier_device", ["humidifier_300s"], indirect=True ) -async def test_set_nightlight_level( +async def test_humidifier_set_nightlight_level( hass: HomeAssistant, manager, humidifier_300s, install_humidifier_device ) -> None: - """Test set of night light level.""" + """Test set of humidifier night light level.""" await hass.services.async_call( SELECT_DOMAIN, SERVICE_SELECT_OPTION, { ATTR_ENTITY_ID: ENTITY_HUMIDIFIER_300S_NIGHT_LIGHT_SELECT, - ATTR_OPTION: NIGHT_LIGHT_LEVEL_DIM, + ATTR_OPTION: HUMIDIFIER_NIGHT_LIGHT_LEVEL_DIM, }, blocking=True, ) # Assert that setter API was invoked with the expected translated value humidifier_300s.set_night_light_brightness.assert_called_once_with( - HA_TO_VS_NIGHT_LIGHT_LEVEL_MAP[NIGHT_LIGHT_LEVEL_DIM] + HA_TO_VS_HUMIDIFIER_NIGHT_LIGHT_LEVEL_MAP[HUMIDIFIER_NIGHT_LIGHT_LEVEL_DIM] ) # Assert that devices were refreshed manager.update_all_devices.assert_called_once() @@ -44,11 +46,13 @@ async def test_set_nightlight_level( @pytest.mark.parametrize( "install_humidifier_device", ["humidifier_300s"], indirect=True ) -async def test_nightlight_level(hass: HomeAssistant, install_humidifier_device) -> None: - """Test the state of night light level select entity.""" +async def test_humidifier_nightlight_level( + hass: HomeAssistant, install_humidifier_device +) -> None: + """Test the state of humidifier night light level select entity.""" # The mocked device has night_light_brightness=50 which is "dim" assert ( hass.states.get(ENTITY_HUMIDIFIER_300S_NIGHT_LIGHT_SELECT).state - == NIGHT_LIGHT_LEVEL_DIM + == 
HUMIDIFIER_NIGHT_LIGHT_LEVEL_DIM ) diff --git a/tests/components/vodafone_station/test_button.py b/tests/components/vodafone_station/test_button.py index d5f377d3f6f..ade5eb78965 100644 --- a/tests/components/vodafone_station/test_button.py +++ b/tests/components/vodafone_station/test_button.py @@ -2,11 +2,20 @@ from unittest.mock import AsyncMock, patch +from aiovodafone.exceptions import ( + AlreadyLogged, + CannotAuthenticate, + CannotConnect, + GenericLoginError, +) +import pytest from syrupy import SnapshotAssertion from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.vodafone_station.const import DOMAIN from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er from . import setup_integration @@ -46,3 +55,39 @@ async def test_pressing_button( blocking=True, ) mock_vodafone_station_router.restart_router.assert_called_once() + + +@pytest.mark.parametrize( + ("side_effect", "key", "error"), + [ + (CannotConnect, "cannot_execute_action", "CannotConnect()"), + (AlreadyLogged, "cannot_execute_action", "AlreadyLogged()"), + (GenericLoginError, "cannot_execute_action", "GenericLoginError()"), + (CannotAuthenticate, "cannot_authenticate", "CannotAuthenticate()"), + ], +) +async def test_button_fails( + hass: HomeAssistant, + mock_vodafone_station_router: AsyncMock, + mock_config_entry: MockConfigEntry, + side_effect: Exception, + key: str, + error: str, +) -> None: + """Test button action fails.""" + + await setup_integration(hass, mock_config_entry) + + mock_vodafone_station_router.restart_router.side_effect = side_effect + + with pytest.raises(HomeAssistantError) as exc_info: + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.vodafone_station_m123456789_restart"}, + blocking=True, + ) + + assert exc_info.value.translation_domain == DOMAIN + assert exc_info.value.translation_key == key + assert exc_info.value.translation_placeholders == {"error": error} diff --git a/tests/components/vodafone_station/test_config_flow.py b/tests/components/vodafone_station/test_config_flow.py index 68f8247bdf9..0648987eb27 100644 --- a/tests/components/vodafone_station/test_config_flow.py +++ b/tests/components/vodafone_station/test_config_flow.py @@ -228,3 +228,75 @@ async def test_options_flow( assert result["data"] == { CONF_CONSIDER_HOME: 37, } + + +async def test_reconfigure_successful( + hass: HomeAssistant, + mock_vodafone_station_router: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that the host can be reconfigured.""" + mock_config_entry.add_to_hass(hass) + result = await mock_config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + # original entry + assert mock_config_entry.data["host"] == "fake_host" + + reconfigure_result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "host": "192.168.100.60", + "password": "fake_password", + "username": "fake_username", + }, + ) + + assert reconfigure_result["type"] is FlowResultType.ABORT + assert reconfigure_result["reason"] == "reconfigure_successful" + + # changed entry + assert mock_config_entry.data["host"] == "192.168.100.60" + + +@pytest.mark.parametrize( + ("side_effect", "error"), + [ + (CannotConnect, "cannot_connect"), + 
(CannotAuthenticate, "invalid_auth"), + (AlreadyLogged, "already_logged"), + (ConnectionResetError, "unknown"), + ], +) +async def test_reconfigure_fails( + hass: HomeAssistant, + mock_vodafone_station_router: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, + side_effect: Exception, + error: str, +) -> None: + """Test that the host can be reconfigured.""" + mock_config_entry.add_to_hass(hass) + result = await mock_config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + mock_vodafone_station_router.login.side_effect = side_effect + + reconfigure_result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "host": "192.168.100.60", + "password": "fake_password", + "username": "fake_username", + }, + ) + + assert reconfigure_result["type"] is FlowResultType.FORM + assert reconfigure_result["step_id"] == "reconfigure" + assert reconfigure_result["errors"] == {"base": error} diff --git a/tests/components/voip/test_voip.py b/tests/components/voip/test_voip.py index d971591c79a..459ab020336 100644 --- a/tests/components/voip/test_voip.py +++ b/tests/components/voip/test_voip.py @@ -27,6 +27,8 @@ from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_component import EntityComponent from homeassistant.setup import async_setup_component +from tests.components.tts.common import MockResultStream + _ONE_SECOND = 16000 * 2 # 16Khz 16-bit _MEDIA_ID = "12345" @@ -879,6 +881,7 @@ async def test_announce( announcement = assist_satellite.AssistSatelliteAnnouncement( message="test announcement", media_id=_MEDIA_ID, + tts_token="test-token", original_media_id=_MEDIA_ID, media_id_source="tts", ) @@ -926,6 +929,7 @@ async def test_voip_id_is_ip_address( announcement = assist_satellite.AssistSatelliteAnnouncement( message="test announcement", media_id=_MEDIA_ID, + tts_token="test-token", original_media_id=_MEDIA_ID, media_id_source="tts", ) @@ -978,6 +982,7 @@ async def test_announce_timeout( announcement = assist_satellite.AssistSatelliteAnnouncement( message="test announcement", media_id=_MEDIA_ID, + tts_token="test-token", original_media_id=_MEDIA_ID, media_id_source="tts", ) @@ -1018,6 +1023,7 @@ async def test_start_conversation( announcement = assist_satellite.AssistSatelliteAnnouncement( message="test announcement", media_id=_MEDIA_ID, + tts_token="test-token", original_media_id=_MEDIA_ID, media_id_source="tts", ) @@ -1162,8 +1168,16 @@ async def test_start_conversation_user_doesnt_pick_up( new=async_pipeline_from_audio_stream, ), patch( - "homeassistant.components.assist_satellite.entity.tts_generate_media_source_id", - return_value="test media id", + "homeassistant.components.tts.generate_media_source_id", + return_value="media-source://bla", + ), + patch( + "homeassistant.components.tts.async_resolve_engine", + return_value="test tts", + ), + patch( + "homeassistant.components.tts.async_create_stream", + return_value=MockResultStream(hass, "wav", b""), ), ): satellite.transport = Mock() diff --git a/tests/components/watergate/snapshots/test_event.ambr b/tests/components/watergate/snapshots/test_event.ambr new file mode 100644 index 00000000000..97f453697ca --- /dev/null +++ b/tests/components/watergate/snapshots/test_event.ambr @@ -0,0 +1,111 @@ +# serializer version: 1 +# name: test_event[event.sonic_duration_auto_shut_off-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 
'event_types': list([ + 'duration_threshold', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.sonic_duration_auto_shut_off', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Duration auto shut-off', + 'platform': 'watergate', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'auto_shut_off_duration', + 'unique_id': 'a63182948ce2896a.auto_shut_off_duration', + 'unit_of_measurement': None, + }) +# --- +# name: test_event[event.sonic_duration_auto_shut_off-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'event_type': None, + 'event_types': list([ + 'duration_threshold', + ]), + 'friendly_name': 'Sonic Duration auto shut-off', + }), + 'context': , + 'entity_id': 'event.sonic_duration_auto_shut_off', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_event[event.sonic_volume_auto_shut_off-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'volume_threshold', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.sonic_volume_auto_shut_off', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Volume auto shut-off', + 'platform': 'watergate', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'auto_shut_off_volume', + 'unique_id': 'a63182948ce2896a.auto_shut_off_volume', + 'unit_of_measurement': None, + }) +# --- +# name: test_event[event.sonic_volume_auto_shut_off-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'event_type': None, + 'event_types': list([ + 'volume_threshold', + ]), + 'friendly_name': 'Sonic Volume auto shut-off', + }), + 'context': , + 'entity_id': 'event.sonic_volume_auto_shut_off', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/watergate/test_event.py b/tests/components/watergate/test_event.py new file mode 100644 index 00000000000..6997c3f1fdf --- /dev/null +++ b/tests/components/watergate/test_event.py @@ -0,0 +1,84 @@ +"""Tests for the Watergate event entity platform.""" + +from collections.abc import Generator + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.typing import StateType + +from . 
import init_integration +from .const import MOCK_WEBHOOK_ID + +from tests.common import AsyncMock, MockConfigEntry, patch, snapshot_platform +from tests.typing import ClientSessionGenerator + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_event( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_entry: MockConfigEntry, + mock_watergate_client: Generator[AsyncMock], + freezer: FrozenDateTimeFactory, + snapshot: SnapshotAssertion, +) -> None: + """Test states of the sensor.""" + freezer.move_to("2021-01-09 12:00:00+00:00") + with patch("homeassistant.components.watergate.PLATFORMS", [Platform.EVENT]): + await init_integration(hass, mock_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_entry.entry_id) + + +@pytest.mark.parametrize( + ("entity_id", "event_type"), + [ + ("sonic_volume_auto_shut_off", "volume_threshold"), + ("sonic_duration_auto_shut_off", "duration_threshold"), + ], +) +async def test_auto_shut_off_webhook( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + mock_entry: MockConfigEntry, + mock_watergate_client: Generator[AsyncMock], + entity_id: str, + event_type: str, +) -> None: + """Test if water flow webhook is handled correctly.""" + await init_integration(hass, mock_entry) + + def assert_state(entity_id: str, expected_state: str): + state = hass.states.get(f"event.{entity_id}") + assert state.state == str(expected_state) + + assert_state(entity_id, "unknown") + + telemetry_change_data = { + "type": "auto-shut-off-report", + "data": { + "type": event_type, + "volume": 1500, + "duration": 30, + "timestamp": 1730148016, + }, + } + client = await hass_client_no_auth() + await client.post(f"/api/webhook/{MOCK_WEBHOOK_ID}", json=telemetry_change_data) + + await hass.async_block_till_done() + + def assert_extra_state( + entity_id: str, attribute: str, expected_attribute: StateType + ): + attributes = hass.states.get(f"event.{entity_id}").attributes + assert attributes.get(attribute) == expected_attribute + + assert_extra_state(entity_id, "event_type", event_type) + assert_extra_state(entity_id, "volume", 1500) + assert_extra_state(entity_id, "duration", 30) diff --git a/tests/components/watergate/test_sensor.py b/tests/components/watergate/test_sensor.py index 78e375857ed..0bf883a1955 100644 --- a/tests/components/watergate/test_sensor.py +++ b/tests/components/watergate/test_sensor.py @@ -1,4 +1,4 @@ -"""Tests for the Watergate valve platform.""" +"""Tests for the Watergate sensor platform.""" from collections.abc import Generator diff --git a/tests/components/webdav/__init__.py b/tests/components/webdav/__init__.py index 33e0222fb34..3b901bdd308 100644 --- a/tests/components/webdav/__init__.py +++ b/tests/components/webdav/__init__.py @@ -1 +1,14 @@ """Tests for the WebDAV integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Set up the WebDAV integration for testing.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/webdav/conftest.py b/tests/components/webdav/conftest.py index 4fdd6fb7870..5fa972e5fae 100644 --- a/tests/components/webdav/conftest.py +++ b/tests/components/webdav/conftest.py @@ -9,7 +9,7 @@ import pytest from homeassistant.components.webdav.const import DOMAIN from 
homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME -from .const import BACKUP_METADATA, MOCK_LIST_WITH_PROPERTIES +from .const import BACKUP_METADATA, MOCK_LIST_FILES from tests.common import MockConfigEntry @@ -58,8 +58,9 @@ def mock_webdav_client() -> Generator[AsyncMock]: mock = mock_webdav_client.return_value mock.check.return_value = True mock.mkdir.return_value = True - mock.list_with_properties.return_value = MOCK_LIST_WITH_PROPERTIES + mock.list_files.return_value = MOCK_LIST_FILES mock.download_iter.side_effect = _download_mock mock.upload_iter.return_value = None mock.clean.return_value = None + mock.move.return_value = None yield mock diff --git a/tests/components/webdav/const.py b/tests/components/webdav/const.py index 8d6b8ad67d7..0147826a777 100644 --- a/tests/components/webdav/const.py +++ b/tests/components/webdav/const.py @@ -1,7 +1,5 @@ """Constants for WebDAV tests.""" -from aiowebdav2 import Property - BACKUP_METADATA = { "addons": [], "backup_id": "23e64aec", @@ -16,18 +14,7 @@ BACKUP_METADATA = { "size": 34519040, } -MOCK_LIST_WITH_PROPERTIES = { - "/Automatic_backup_2025.2.1_2025-02-10_18.31_30202686.tar": [], - "/Automatic_backup_2025.2.1_2025-02-10_18.31_30202686.metadata.json": [ - Property( - namespace="https://home-assistant.io", - name="backup_id", - value="23e64aec", - ), - Property( - namespace="https://home-assistant.io", - name="metadata_version", - value="1", - ), - ], -} +MOCK_LIST_FILES = [ + "/Automatic_backup_2025.2.1_2025-02-10_18.31_30202686.tar", + "/Automatic_backup_2025.2.1_2025-02-10_18.31_30202686.metadata.json", +] diff --git a/tests/components/webdav/test_backup.py b/tests/components/webdav/test_backup.py index c20e73cc786..ca20467484f 100644 --- a/tests/components/webdav/test_backup.py +++ b/tests/components/webdav/test_backup.py @@ -6,7 +6,6 @@ from collections.abc import AsyncGenerator from io import StringIO from unittest.mock import Mock, patch -from aiowebdav2 import Property from aiowebdav2.exceptions import UnauthorizedError, WebDavError import pytest @@ -17,7 +16,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.backup import async_initialize_backup from homeassistant.setup import async_setup_component -from .const import BACKUP_METADATA, MOCK_LIST_WITH_PROPERTIES +from .const import BACKUP_METADATA from tests.common import AsyncMock, MockConfigEntry from tests.typing import ClientSessionGenerator, WebSocketGenerator @@ -184,7 +183,6 @@ async def test_agents_upload( assert resp.status == 201 assert webdav_client.upload_iter.call_count == 2 - assert webdav_client.set_property_batch.call_count == 1 async def test_agents_download( @@ -211,7 +209,7 @@ async def test_error_on_agents_download( """Test we get not found on a not existing backup on download.""" client = await hass_client() backup_id = BACKUP_METADATA["backup_id"] - webdav_client.list_with_properties.side_effect = [MOCK_LIST_WITH_PROPERTIES, {}] + webdav_client.list_files.return_value = [] resp = await client.get( f"/api/backup/download/{backup_id}?agent_id={DOMAIN}.{mock_config_entry.entry_id}" @@ -262,7 +260,7 @@ async def test_agents_delete_not_found_does_not_throw( webdav_client: AsyncMock, ) -> None: """Test agent delete backup.""" - webdav_client.list_with_properties.return_value = {} + webdav_client.list_files.return_value = {} client = await hass_ws_client(hass) await client.send_json_auto_id( @@ -283,7 +281,7 @@ async def test_agents_backup_not_found( webdav_client: AsyncMock, ) -> None: """Test backup not found.""" - 
webdav_client.list_with_properties.return_value = [] + webdav_client.list_files.return_value = [] backup_id = BACKUP_METADATA["backup_id"] client = await hass_ws_client(hass) await client.send_json_auto_id({"type": "backup/details", "backup_id": backup_id}) @@ -300,7 +298,7 @@ async def test_raises_on_403( mock_config_entry: MockConfigEntry, ) -> None: """Test we raise on 403.""" - webdav_client.list_with_properties.side_effect = UnauthorizedError( + webdav_client.list_files.side_effect = UnauthorizedError( "https://webdav.example.com" ) backup_id = BACKUP_METADATA["backup_id"] @@ -324,30 +322,3 @@ async def test_listeners_get_cleaned_up(hass: HomeAssistant) -> None: remove_listener() assert hass.data.get(DATA_BACKUP_AGENT_LISTENERS) is None - - -async def test_metadata_misses_backup_id( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - webdav_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test getting a backup when metadata has backup id property.""" - MOCK_LIST_WITH_PROPERTIES[ - "/Automatic_backup_2025.2.1_2025-02-10_18.31_30202686.metadata.json" - ] = [ - Property( - namespace="homeassistant", - name="metadata_version", - value="1", - ) - ] - webdav_client.list_with_properties.return_value = MOCK_LIST_WITH_PROPERTIES - - backup_id = BACKUP_METADATA["backup_id"] - client = await hass_ws_client(hass) - await client.send_json_auto_id({"type": "backup/details", "backup_id": backup_id}) - response = await client.receive_json() - - assert response["success"] - assert response["result"]["backup"] is None diff --git a/tests/components/webdav/test_config_flow.py b/tests/components/webdav/test_config_flow.py index eb887edb1a1..9204e6eadab 100644 --- a/tests/components/webdav/test_config_flow.py +++ b/tests/components/webdav/test_config_flow.py @@ -2,7 +2,7 @@ from unittest.mock import AsyncMock -from aiowebdav2.exceptions import UnauthorizedError +from aiowebdav2.exceptions import MethodNotSupportedError, UnauthorizedError import pytest from homeassistant import config_entries @@ -86,6 +86,7 @@ async def test_form_fail(hass: HomeAssistant, webdav_client: AsyncMock) -> None: ("exception", "expected_error"), [ (UnauthorizedError("https://webdav.demo"), "invalid_auth"), + (MethodNotSupportedError("check", "https://webdav.demo"), "invalid_method"), (Exception("Unexpected error"), "unknown"), ], ) diff --git a/tests/components/webdav/test_init.py b/tests/components/webdav/test_init.py new file mode 100644 index 00000000000..124a644fa93 --- /dev/null +++ b/tests/components/webdav/test_init.py @@ -0,0 +1,112 @@ +"""Test WebDAV component setup.""" + +from unittest.mock import AsyncMock + +from aiowebdav2.exceptions import WebDavError +import pytest + +from homeassistant.components.webdav.const import CONF_BACKUP_PATH, DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +async def test_migrate_wrong_path( + hass: HomeAssistant, webdav_client: AsyncMock +) -> None: + """Test migration of wrong encoded folder path.""" + webdav_client.list_with_properties.return_value = [ + {"/wrong%20path": []}, + ] + + config_entry = MockConfigEntry( + title="user@webdav.demo", + domain=DOMAIN, + data={ + CONF_URL: "https://webdav.demo", + CONF_USERNAME: "user", + CONF_PASSWORD: "supersecretpassword", + CONF_BACKUP_PATH: "/wrong path", + }, + entry_id="01JKXV07ASC62D620DGYNG2R8H", + ) + await setup_integration(hass, config_entry) + + webdav_client.move.assert_called_once_with("/wrong%20path", "/wrong path") + + +@pytest.mark.parametrize( + ("expected_path", "remote_path_check"), + [ + ( + "/correct path", + False, + ), # remote_path_check is False as /correct%20path is not there + ("/", True), + ("/folder_with_underscores", True), + ], +) +async def test_migrate_non_wrong_path( + hass: HomeAssistant, + webdav_client: AsyncMock, + expected_path: str, + remote_path_check: bool, +) -> None: + """Test no migration of correct folder path.""" + webdav_client.list_with_properties.return_value = [ + {expected_path: []}, + ] + # first return is used to check the connectivity + # second is used in the migration to determine if wrong quoted path is there + webdav_client.check.side_effect = [True, remote_path_check] + + config_entry = MockConfigEntry( + title="user@webdav.demo", + domain=DOMAIN, + data={ + CONF_URL: "https://webdav.demo", + CONF_USERNAME: "user", + CONF_PASSWORD: "supersecretpassword", + CONF_BACKUP_PATH: expected_path, + }, + entry_id="01JKXV07ASC62D620DGYNG2R8H", + ) + + await setup_integration(hass, config_entry) + + webdav_client.move.assert_not_called() + + +async def test_migrate_error( + hass: HomeAssistant, + webdav_client: AsyncMock, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test migration of wrong encoded folder path with error.""" + webdav_client.list_with_properties.return_value = [ + {"/wrong%20path": []}, + ] + webdav_client.move.side_effect = WebDavError("Failed to move") + + config_entry = MockConfigEntry( + title="user@webdav.demo", + domain=DOMAIN, + data={ + CONF_URL: "https://webdav.demo", + CONF_USERNAME: "user", + CONF_PASSWORD: "supersecretpassword", + CONF_BACKUP_PATH: "/wrong path", + }, + entry_id="01JKXV07ASC62D620DGYNG2R8H", + ) + await setup_integration(hass, config_entry) + + assert config_entry.state is ConfigEntryState.SETUP_RETRY + assert ( + 'Failed to migrate wrong encoded folder "/wrong%20path" to "/wrong path"' + in caplog.text + ) diff --git a/tests/components/webmin/snapshots/test_sensor.ambr b/tests/components/webmin/snapshots/test_sensor.ambr index a2068f662ba..1af5fe46b5c 100644 --- a/tests/components/webmin/snapshots/test_sensor.ambr +++ b/tests/components/webmin/snapshots/test_sensor.ambr @@ -1451,7 +1451,7 @@ 'state': '8794.3125', }) # --- -# name: test_sensor[sensor.192_168_1_1_load_15m-entry] +# name: test_sensor[sensor.192_168_1_1_load_15_min-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -1466,7 +1466,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_load_15m', + 'entity_id': 'sensor.192_168_1_1_load_15_min', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -1478,7 +1478,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'Load (15m)', + 'original_name': 'Load (15 min)', 'platform': 'webmin', 'previous_unique_id': None, 
'supported_features': 0, @@ -1487,21 +1487,21 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[sensor.192_168_1_1_load_15m-state] +# name: test_sensor[sensor.192_168_1_1_load_15_min-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 Load (15m)', + 'friendly_name': '192.168.1.1 Load (15 min)', 'state_class': , }), 'context': , - 'entity_id': 'sensor.192_168_1_1_load_15m', + 'entity_id': 'sensor.192_168_1_1_load_15_min', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1.37', }) # --- -# name: test_sensor[sensor.192_168_1_1_load_1m-entry] +# name: test_sensor[sensor.192_168_1_1_load_1_min-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -1516,7 +1516,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_load_1m', + 'entity_id': 'sensor.192_168_1_1_load_1_min', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -1528,7 +1528,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'Load (1m)', + 'original_name': 'Load (1 min)', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, @@ -1537,21 +1537,21 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[sensor.192_168_1_1_load_1m-state] +# name: test_sensor[sensor.192_168_1_1_load_1_min-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 Load (1m)', + 'friendly_name': '192.168.1.1 Load (1 min)', 'state_class': , }), 'context': , - 'entity_id': 'sensor.192_168_1_1_load_1m', + 'entity_id': 'sensor.192_168_1_1_load_1_min', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1.29', }) # --- -# name: test_sensor[sensor.192_168_1_1_load_5m-entry] +# name: test_sensor[sensor.192_168_1_1_load_5_min-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -1566,7 +1566,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_load_5m', + 'entity_id': 'sensor.192_168_1_1_load_5_min', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -1578,7 +1578,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'Load (5m)', + 'original_name': 'Load (5 min)', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, @@ -1587,14 +1587,14 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[sensor.192_168_1_1_load_5m-state] +# name: test_sensor[sensor.192_168_1_1_load_5_min-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 Load (5m)', + 'friendly_name': '192.168.1.1 Load (5 min)', 'state_class': , }), 'context': , - 'entity_id': 'sensor.192_168_1_1_load_5m', + 'entity_id': 'sensor.192_168_1_1_load_5_min', 'last_changed': , 'last_reported': , 'last_updated': , diff --git a/tests/components/whois/snapshots/test_config_flow.ambr b/tests/components/whois/snapshots/test_config_flow.ambr index 0d99b0596e3..97d6fde6376 100644 --- a/tests/components/whois/snapshots/test_config_flow.ambr +++ b/tests/components/whois/snapshots/test_config_flow.ambr @@ -175,6 +175,94 @@ 'version': 1, }) # --- +# name: test_full_flow_with_error[WhoisPrivateRegistry-private_registry] + FlowResultSnapshot({ + 'context': dict({ + 'source': 'user', + 'unique_id': 'example.com', + }), + 'data': dict({ + 'domain': 'example.com', + }), + 'description': None, + 'description_placeholders': None, + 'flow_id': , + 'handler': 'whois', + 'minor_version': 1, + 'options': dict({ + }), + 'result': ConfigEntrySnapshot({ + 'data': 
dict({ + 'domain': 'example.com', + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'whois', + 'entry_id': , + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'subentries': list([ + ]), + 'title': 'Example.com', + 'unique_id': 'example.com', + 'version': 1, + }), + 'subentries': tuple( + ), + 'title': 'Example.com', + 'type': , + 'version': 1, + }) +# --- +# name: test_full_flow_with_error[WhoisQuotaExceeded-quota_exceeded] + FlowResultSnapshot({ + 'context': dict({ + 'source': 'user', + 'unique_id': 'example.com', + }), + 'data': dict({ + 'domain': 'example.com', + }), + 'description': None, + 'description_placeholders': None, + 'flow_id': , + 'handler': 'whois', + 'minor_version': 1, + 'options': dict({ + }), + 'result': ConfigEntrySnapshot({ + 'data': dict({ + 'domain': 'example.com', + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'whois', + 'entry_id': , + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'subentries': list([ + ]), + 'title': 'Example.com', + 'unique_id': 'example.com', + 'version': 1, + }), + 'subentries': tuple( + ), + 'title': 'Example.com', + 'type': , + 'version': 1, + }) +# --- # name: test_full_user_flow FlowResultSnapshot({ 'context': dict({ diff --git a/tests/components/whois/test_config_flow.py b/tests/components/whois/test_config_flow.py index 35e40c4e809..6ab02887be2 100644 --- a/tests/components/whois/test_config_flow.py +++ b/tests/components/whois/test_config_flow.py @@ -9,6 +9,8 @@ from whois.exceptions import ( UnknownDateFormat, UnknownTld, WhoisCommandFailed, + WhoisPrivateRegistry, + WhoisQuotaExceeded, ) from homeassistant.components.whois.const import DOMAIN @@ -52,6 +54,8 @@ async def test_full_user_flow( (FailedParsingWhoisOutput, "unexpected_response"), (UnknownDateFormat, "unknown_date_format"), (WhoisCommandFailed, "whois_command_failed"), + (WhoisPrivateRegistry, "private_registry"), + (WhoisQuotaExceeded, "quota_exceeded"), ], ) async def test_full_flow_with_error( diff --git a/tests/components/withings/snapshots/test_sensor.ambr b/tests/components/withings/snapshots/test_sensor.ambr index ec9fc1ed3fc..f735c506f65 100644 --- a/tests/components/withings/snapshots/test_sensor.ambr +++ b/tests/components/withings/snapshots/test_sensor.ambr @@ -503,6 +503,9 @@ }), 'name': None, 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -512,7 +515,7 @@ 'supported_features': 0, 'translation_key': 'deep_sleep', 'unique_id': 'withings_12345_sleep_deep_duration_seconds', - 'unit_of_measurement': , + 'unit_of_measurement': , }) # --- # name: test_all_entities[sensor.henk_deep_sleep-state] @@ -521,14 +524,14 @@ 'device_class': 'duration', 'friendly_name': 'henk Deep sleep', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.henk_deep_sleep', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '5820', + 'state': '1.617', }) # --- # name: test_all_entities[sensor.henk_diastolic_blood_pressure-entry] @@ -1778,7 +1781,7 @@ 'name': None, 'options': dict({ 'sensor': dict({ - 'suggested_display_precision': 1, + 'suggested_display_precision': 2, }), }), 'original_device_class': , @@ -2242,6 +2245,9 @@ }), 'name': None, 'options': dict({ + 'sensor.private': dict({ + 
'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -2251,7 +2257,7 @@ 'supported_features': 0, 'translation_key': 'light_sleep', 'unique_id': 'withings_12345_sleep_light_duration_seconds', - 'unit_of_measurement': , + 'unit_of_measurement': , }) # --- # name: test_all_entities[sensor.henk_light_sleep-state] @@ -2260,14 +2266,14 @@ 'device_class': 'duration', 'friendly_name': 'henk Light sleep', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.henk_light_sleep', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '10440', + 'state': '2.900', }) # --- # name: test_all_entities[sensor.henk_maximum_heart_rate-entry] @@ -2988,6 +2994,9 @@ }), 'name': None, 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -2997,7 +3006,7 @@ 'supported_features': 0, 'translation_key': 'rem_sleep', 'unique_id': 'withings_12345_sleep_rem_duration_seconds', - 'unit_of_measurement': , + 'unit_of_measurement': , }) # --- # name: test_all_entities[sensor.henk_rem_sleep-state] @@ -3006,14 +3015,14 @@ 'device_class': 'duration', 'friendly_name': 'henk REM sleep', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.henk_rem_sleep', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2400', + 'state': '0.667', }) # --- # name: test_all_entities[sensor.henk_skin_temperature-entry] @@ -3616,6 +3625,9 @@ }), 'name': None, 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -3625,7 +3637,7 @@ 'supported_features': 0, 'translation_key': 'time_to_sleep', 'unique_id': 'withings_12345_sleep_tosleep_duration_seconds', - 'unit_of_measurement': , + 'unit_of_measurement': , }) # --- # name: test_all_entities[sensor.henk_time_to_sleep-state] @@ -3634,14 +3646,14 @@ 'device_class': 'duration', 'friendly_name': 'henk Time to sleep', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.henk_time_to_sleep', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '540', + 'state': '0.150', }) # --- # name: test_all_entities[sensor.henk_time_to_wakeup-entry] @@ -3668,6 +3680,9 @@ }), 'name': None, 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -3677,7 +3692,7 @@ 'supported_features': 0, 'translation_key': 'time_to_wakeup', 'unique_id': 'withings_12345_sleep_towakeup_duration_seconds', - 'unit_of_measurement': , + 'unit_of_measurement': , }) # --- # name: test_all_entities[sensor.henk_time_to_wakeup-state] @@ -3686,14 +3701,14 @@ 'device_class': 'duration', 'friendly_name': 'henk Time to wakeup', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.henk_time_to_wakeup', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '1140', + 'state': '0.317', }) # --- # name: test_all_entities[sensor.henk_total_calories_burnt_today-entry] @@ -3971,6 +3986,9 @@ }), 'name': None, 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -3980,7 +3998,7 @@ 'supported_features': 0, 'translation_key': 'wakeup_time', 'unique_id': 
'withings_12345_sleep_wakeup_duration_seconds', - 'unit_of_measurement': , + 'unit_of_measurement': , }) # --- # name: test_all_entities[sensor.henk_wakeup_time-state] @@ -3989,14 +4007,14 @@ 'device_class': 'duration', 'friendly_name': 'henk Wakeup time', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.henk_wakeup_time', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '3060', + 'state': '0.850', }) # --- # name: test_all_entities[sensor.henk_weight-entry] diff --git a/tests/components/wolflink/__init__.py b/tests/components/wolflink/__init__.py index dea7c5195ad..11c82ad9f61 100644 --- a/tests/components/wolflink/__init__.py +++ b/tests/components/wolflink/__init__.py @@ -1 +1,14 @@ """Tests for the Wolf SmartSet Service integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Set up the wolflink integration for testing.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/wolflink/conftest.py b/tests/components/wolflink/conftest.py new file mode 100644 index 00000000000..5142762b5e4 --- /dev/null +++ b/tests/components/wolflink/conftest.py @@ -0,0 +1,125 @@ +"""Fixtures for Wolflink integration tests.""" + +from __future__ import annotations + +from collections.abc import Generator +from unittest.mock import MagicMock, patch + +import pytest +from wolf_comm import ( + EnergyParameter, + FlowParameter, + FrequencyParameter, + HoursParameter, + ListItem, + ListItemParameter, + PercentageParameter, + PowerParameter, + Pressure, + RPMParameter, + SimpleParameter, + Temperature, + Value, +) + +from homeassistant.components.wolflink.const import ( + DEVICE_GATEWAY, + DEVICE_ID, + DEVICE_NAME, + DOMAIN, +) +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + title="Wolf SmartSet", + domain=DOMAIN, + data={ + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + DEVICE_NAME: "test-device", + DEVICE_GATEWAY: "5678", + DEVICE_ID: "1234", + }, + unique_id="1234", + version=1, + minor_version=2, + ) + + +@pytest.fixture +def mock_wolflink() -> Generator[MagicMock]: + """Return a mocked wolflink client.""" + with ( + patch( + "homeassistant.components.wolflink.WolfClient", autospec=True + ) as wolflink_mock, + patch( + "homeassistant.components.wolflink.config_flow.WolfClient", + new=wolflink_mock, + ), + ): + wolflink = wolflink_mock.return_value + + wolflink.fetch_parameters.return_value = [ + EnergyParameter( + 6002800000, "Energy Parameter", "Heating", 6005200000, 2000 + ), + ListItemParameter( + 8002800000, + "List Item Parameter", + "Heating", + [ListItem("0", "Aus"), ListItem("1", "Ein")], + 8005200000, + 3001, + ), + PowerParameter(5002800000, "Power Parameter", "Heating", 5005200000, 1000), + Pressure(4002800000, "Pressure Parameter", "Heating", 4005200000, 1000), + Temperature(3002800000, "Temperature Parameter", "Solar", 3005200000, 1000), + PercentageParameter( + 2002800000, "Percentage Parameter", "Solar", 2005200000, 1000 + ), + HoursParameter(7002800000, "Hours Parameter", "Heating", 7005200000, 1000), + SimpleParameter(1002800000, "Simple Parameter", "DHW", 1005200000, 1000), + FrequencyParameter( + 9002800000, "Frequency Parameter", "Heating", 9005200000, 1000 + ), + RPMParameter(1000280001, "RPM Parameter", "Heating", 10005200000, 7000), + FlowParameter(1100280001, "Flow Parameter", "Heating", 11005200000, 8000), + HoursParameter(7002800000, "Hours Parameter", "Heating", 7005200000, 1000), + SimpleParameter(1002800000, "Simple Parameter", "DHW", 1005200000, 1000), + ] + + wolflink.fetch_value.return_value = [ + Value(6002800000, "183", 1), + Value(8002800000, "1", 1), + Value(5002800000, "50", 1), + Value(4002800000, "3", 1), + Value(3002800000, "65", 1), + Value(2002800000, "20", 1), + Value(7002800000, "10", 1), + Value(1002800000, "12", 1), + Value(9002800000, "50", 1), + Value(1000280001, "1500", 1), + Value(1100280001, "5", 1), + ] + + yield wolflink + + +@pytest.fixture +async def init_integration( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_wolflink: MagicMock +) -> MockConfigEntry: + """Set up the Wolflink integration for testing.""" + await setup_integration(hass, mock_config_entry) + + return mock_config_entry diff --git a/tests/components/wolflink/snapshots/test_sensor.ambr b/tests/components/wolflink/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..c1ff80c9630 --- /dev/null +++ b/tests/components/wolflink/snapshots/test_sensor.ambr @@ -0,0 +1,603 @@ +# serializer version: 1 +# name: test_device_entry + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://www.wolf-smartset.com/', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'wolflink', + '1234', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'WOLF GmbH', + 'model': None, + 'model_id': None, + 'name': 'Wolf SmartSet', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': 
None, + 'sw_version': None, + 'via_device_id': None, + }) +# --- +# name: test_sensors[sensor.energy_parameter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_parameter', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy Parameter', + 'platform': 'wolflink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234:6005200000', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_parameter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Parameter', + 'parameter_id': 6005200000, + 'parent': 'Heating', + 'unit_of_measurement': , + 'value_id': 6002800000, + }), + 'context': , + 'entity_id': 'sensor.energy_parameter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '183', + }) +# --- +# name: test_sensors[sensor.flow_parameter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.flow_parameter', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Flow Parameter', + 'platform': 'wolflink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234:11005200000', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.flow_parameter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'volume_flow_rate', + 'friendly_name': 'Flow Parameter', + 'parameter_id': 11005200000, + 'parent': 'Heating', + 'unit_of_measurement': , + 'value_id': 1100280001, + }), + 'context': , + 'entity_id': 'sensor.flow_parameter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5', + }) +# --- +# name: test_sensors[sensor.frequency_parameter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.frequency_parameter', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Frequency Parameter', + 'platform': 'wolflink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234:9005200000', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.frequency_parameter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Frequency Parameter', + 'parameter_id': 9005200000, + 'parent': 'Heating', + 'unit_of_measurement': , + 'value_id': 9002800000, + }), + 'context': 
, + 'entity_id': 'sensor.frequency_parameter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50', + }) +# --- +# name: test_sensors[sensor.hours_parameter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.hours_parameter', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:clock', + 'original_name': 'Hours Parameter', + 'platform': 'wolflink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234:7005200000', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.hours_parameter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Hours Parameter', + 'icon': 'mdi:clock', + 'parameter_id': 7005200000, + 'parent': 'Heating', + 'unit_of_measurement': , + 'value_id': 7002800000, + }), + 'context': , + 'entity_id': 'sensor.hours_parameter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_sensors[sensor.list_item_parameter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.list_item_parameter', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'List Item Parameter', + 'platform': 'wolflink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'state', + 'unique_id': '1234:8005200000', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.list_item_parameter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'List Item Parameter', + 'parameter_id': 8005200000, + 'parent': 'Heating', + 'value_id': 8002800000, + }), + 'context': , + 'entity_id': 'sensor.list_item_parameter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'ein', + }) +# --- +# name: test_sensors[sensor.percentage_parameter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.percentage_parameter', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Percentage Parameter', + 'platform': 'wolflink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234:2005200000', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.percentage_parameter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Percentage Parameter', + 'parameter_id': 2005200000, + 'parent': 'Solar', + 'unit_of_measurement': '%', + 'value_id': 
2002800000, + }), + 'context': , + 'entity_id': 'sensor.percentage_parameter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20', + }) +# --- +# name: test_sensors[sensor.power_parameter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.power_parameter', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power Parameter', + 'platform': 'wolflink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234:5005200000', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.power_parameter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Power Parameter', + 'parameter_id': 5005200000, + 'parent': 'Heating', + 'unit_of_measurement': , + 'value_id': 5002800000, + }), + 'context': , + 'entity_id': 'sensor.power_parameter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50', + }) +# --- +# name: test_sensors[sensor.pressure_parameter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pressure_parameter', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pressure Parameter', + 'platform': 'wolflink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234:4005200000', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.pressure_parameter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'Pressure Parameter', + 'parameter_id': 4005200000, + 'parent': 'Heating', + 'unit_of_measurement': , + 'value_id': 4002800000, + }), + 'context': , + 'entity_id': 'sensor.pressure_parameter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3', + }) +# --- +# name: test_sensors[sensor.rpm_parameter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.rpm_parameter', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'RPM Parameter', + 'platform': 'wolflink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234:10005200000', + 'unit_of_measurement': 'rpm', + }) +# --- +# name: test_sensors[sensor.rpm_parameter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'RPM Parameter', + 'parameter_id': 10005200000, + 
'parent': 'Heating', + 'state_class': , + 'unit_of_measurement': 'rpm', + 'value_id': 1000280001, + }), + 'context': , + 'entity_id': 'sensor.rpm_parameter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1500', + }) +# --- +# name: test_sensors[sensor.simple_parameter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.simple_parameter', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Simple Parameter', + 'platform': 'wolflink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234:1005200000', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.simple_parameter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Simple Parameter', + 'parameter_id': 1005200000, + 'parent': 'DHW', + 'value_id': 1002800000, + }), + 'context': , + 'entity_id': 'sensor.simple_parameter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '12', + }) +# --- +# name: test_sensors[sensor.temperature_parameter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.temperature_parameter', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature Parameter', + 'platform': 'wolflink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234:3005200000', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.temperature_parameter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Temperature Parameter', + 'parameter_id': 3005200000, + 'parent': 'Solar', + 'unit_of_measurement': , + 'value_id': 3002800000, + }), + 'context': , + 'entity_id': 'sensor.temperature_parameter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '65', + }) +# --- diff --git a/tests/components/wolflink/test_sensor.py b/tests/components/wolflink/test_sensor.py new file mode 100644 index 00000000000..8fc78f707d5 --- /dev/null +++ b/tests/components/wolflink/test_sensor.py @@ -0,0 +1,45 @@ +"""Test the Wolf SmartSet Service Sensor platform.""" + +from unittest.mock import MagicMock + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, patch, snapshot_platform + + +async def test_device_entry( + hass: HomeAssistant, + mock_wolflink: MagicMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test device entry creation.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + device = device_registry.async_get_device({(mock_config_entry.domain, "1234")}) + assert device == snapshot + + +async def test_sensors( + hass: HomeAssistant, + mock_wolflink: MagicMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test wolflink sensors.""" + + with patch("homeassistant.components.wolflink.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/wyoming/conftest.py b/tests/components/wyoming/conftest.py index 018fff33821..125edc547c6 100644 --- a/tests/components/wyoming/conftest.py +++ b/tests/components/wyoming/conftest.py @@ -121,7 +121,9 @@ def handle_config_entry(hass: HomeAssistant) -> ConfigEntry: @pytest.fixture -async def init_wyoming_stt(hass: HomeAssistant, stt_config_entry: ConfigEntry): +async def init_wyoming_stt( + hass: HomeAssistant, stt_config_entry: ConfigEntry +) -> ConfigEntry: """Initialize Wyoming STT.""" with patch( "homeassistant.components.wyoming.data.load_wyoming_info", @@ -129,9 +131,13 @@ async def init_wyoming_stt(hass: HomeAssistant, stt_config_entry: ConfigEntry): ): await hass.config_entries.async_setup(stt_config_entry.entry_id) + return stt_config_entry + @pytest.fixture -async def init_wyoming_tts(hass: HomeAssistant, tts_config_entry: ConfigEntry): +async def init_wyoming_tts( + hass: HomeAssistant, tts_config_entry: ConfigEntry +) -> ConfigEntry: """Initialize Wyoming TTS.""" with patch( "homeassistant.components.wyoming.data.load_wyoming_info", @@ -139,11 +145,13 @@ async def init_wyoming_tts(hass: HomeAssistant, tts_config_entry: ConfigEntry): ): await hass.config_entries.async_setup(tts_config_entry.entry_id) + return tts_config_entry + @pytest.fixture async def init_wyoming_wake_word( hass: HomeAssistant, wake_word_config_entry: ConfigEntry -): +) -> ConfigEntry: """Initialize Wyoming Wake Word.""" with patch( "homeassistant.components.wyoming.data.load_wyoming_info", @@ -151,6 +159,8 @@ async def init_wyoming_wake_word( ): await hass.config_entries.async_setup(wake_word_config_entry.entry_id) + return wake_word_config_entry + @pytest.fixture async def init_wyoming_intent( diff --git a/tests/components/wyoming/test_tts.py b/tests/components/wyoming/test_tts.py index 263804787b1..6e0edc022c0 100644 --- a/tests/components/wyoming/test_tts.py +++ b/tests/components/wyoming/test_tts.py @@ -150,17 +150,15 @@ async def test_get_tts_audio_connection_lost( hass: HomeAssistant, init_wyoming_tts ) -> None: """Test streaming audio and losing connection.""" - with ( - patch( - "homeassistant.components.wyoming.tts.AsyncTcpClient", - MockAsyncTcpClient([None]), - ), - pytest.raises(HomeAssistantError), + stream = tts.async_create_stream(hass, "tts.test_tts", "en-US") + with patch( + "homeassistant.components.wyoming.tts.AsyncTcpClient", + MockAsyncTcpClient([None]), ): - await tts.async_get_media_source_audio( - hass, - 
tts.generate_media_source_id(hass, "Hello world", "tts.test_tts", "en-US"), - ) + stream.async_set_message("Hello world") + with pytest.raises(HomeAssistantError): + async for _chunk in stream.async_stream_result(): + pass async def test_get_tts_audio_audio_oserror( diff --git a/tests/components/wyoming/test_websocket.py b/tests/components/wyoming/test_websocket.py new file mode 100644 index 00000000000..18b43321354 --- /dev/null +++ b/tests/components/wyoming/test_websocket.py @@ -0,0 +1,58 @@ +"""Websocket tests for Wyoming integration.""" + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant + +from tests.typing import WebSocketGenerator + + +async def test_info( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + init_components, + init_wyoming_stt: ConfigEntry, + init_wyoming_tts: ConfigEntry, + init_wyoming_wake_word: ConfigEntry, + init_wyoming_intent: ConfigEntry, + init_wyoming_handle: ConfigEntry, +) -> None: + """Test info websocket command.""" + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "wyoming/info"}) + + # result + msg = await client.receive_json() + assert msg["success"] + + info = msg.get("result", {}).get("info", {}) + + # stt (speech-to-text) = asr (automated speech recognition) + assert init_wyoming_stt.entry_id in info + asr_info = info[init_wyoming_stt.entry_id].get("asr", []) + assert len(asr_info) == 1 + assert asr_info[0].get("name") == "Test ASR" + + # tts (text-to-speech) + assert init_wyoming_tts.entry_id in info + tts_info = info[init_wyoming_tts.entry_id].get("tts", []) + assert len(tts_info) == 1 + assert tts_info[0].get("name") == "Test TTS" + + # wake word detection + assert init_wyoming_wake_word.entry_id in info + wake_info = info[init_wyoming_wake_word.entry_id].get("wake", []) + assert len(wake_info) == 1 + assert wake_info[0].get("name") == "Test Wake Word" + + # intent recognition + assert init_wyoming_intent.entry_id in info + intent_info = info[init_wyoming_intent.entry_id].get("intent", []) + assert len(intent_info) == 1 + assert intent_info[0].get("name") == "Test Intent" + + # intent handling + assert init_wyoming_handle.entry_id in info + handle_info = info[init_wyoming_handle.entry_id].get("handle", []) + assert len(handle_info) == 1 + assert handle_info[0].get("name") == "Test Handle" diff --git a/tests/components/zha/snapshots/test_diagnostics.ambr b/tests/components/zha/snapshots/test_diagnostics.ambr index ba8aa9ea245..7a599b00a21 100644 --- a/tests/components/zha/snapshots/test_diagnostics.ambr +++ b/tests/components/zha/snapshots/test_diagnostics.ambr @@ -179,7 +179,16 @@ }), '0x0010': dict({ 'attribute': "ZCLAttributeDef(id=0x0010, name='cie_addr', type=, zcl_type=, access=, mandatory=True, is_manufacturer_specific=False)", - 'value': None, + 'value': list([ + 50, + 79, + 50, + 2, + 0, + 141, + 21, + 0, + ]), }), '0x0011': dict({ 'attribute': "ZCLAttributeDef(id=0x0011, name='zone_id', type=, zcl_type=, access=, mandatory=True, is_manufacturer_specific=False)", diff --git a/tests/components/zha/test_cover.py b/tests/components/zha/test_cover.py index e5d588aa1bf..4bc4d6c97cf 100644 --- a/tests/components/zha/test_cover.py +++ b/tests/components/zha/test_cover.py @@ -81,7 +81,7 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: cluster = zigpy_device.endpoints[1].window_covering cluster.PLUGGED_ATTR_READS = { WCAttrs.current_position_lift_percentage.name: 0, - WCAttrs.current_position_tilt_percentage.name: 42, 
+ WCAttrs.current_position_tilt_percentage.name: 100, WCAttrs.window_covering_type.name: WCT.Tilt_blind_tilt_and_lift, WCAttrs.config_status.name: WCCS(~WCCS.Open_up_commands_reversed), } @@ -115,33 +115,33 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert state assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 100 - assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 58 + assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 - # test that the state has changed from unavailable to off + # test that the state has changed from open to closed await send_attributes_report( hass, cluster, {WCAttrs.current_position_lift_percentage.id: 100} ) assert hass.states.get(entity_id).state == CoverState.CLOSED - # test to see if it opens + # test that it opens await send_attributes_report( hass, cluster, {WCAttrs.current_position_lift_percentage.id: 0} ) assert hass.states.get(entity_id).state == CoverState.OPEN - # test that the state remains after tilting to 100% - await send_attributes_report( - hass, cluster, {WCAttrs.current_position_tilt_percentage.id: 100} - ) - assert hass.states.get(entity_id).state == CoverState.OPEN - - # test to see the state remains after tilting to 0% + # test that the state remains after tilting to 0% (open) await send_attributes_report( hass, cluster, {WCAttrs.current_position_tilt_percentage.id: 0} ) assert hass.states.get(entity_id).state == CoverState.OPEN - # close from UI + # test that the state remains after tilting to 100% (closed) + await send_attributes_report( + hass, cluster, {WCAttrs.current_position_tilt_percentage.id: 100} + ) + assert hass.states.get(entity_id).state == CoverState.OPEN + + # close lift from UI with patch("zigpy.zcl.Cluster.request", return_value=[0x1, zcl_f.Status.SUCCESS]): await hass.services.async_call( COVER_DOMAIN, SERVICE_CLOSE_COVER, {"entity_id": entity_id}, blocking=True @@ -160,6 +160,11 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert hass.states.get(entity_id).state == CoverState.CLOSED + # close tilt from UI, needs re-opening first + await send_attributes_report( + hass, cluster, {WCAttrs.current_position_tilt_percentage.id: 0} + ) + assert hass.states.get(entity_id).state == CoverState.OPEN with patch("zigpy.zcl.Cluster.request", return_value=[0x1, zcl_f.Status.SUCCESS]): await hass.services.async_call( COVER_DOMAIN, @@ -185,7 +190,7 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert hass.states.get(entity_id).state == CoverState.CLOSED - # open from UI + # open lift from UI with patch("zigpy.zcl.Cluster.request", return_value=[0x0, zcl_f.Status.SUCCESS]): await hass.services.async_call( COVER_DOMAIN, SERVICE_OPEN_COVER, {"entity_id": entity_id}, blocking=True @@ -204,6 +209,7 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert hass.states.get(entity_id).state == CoverState.OPEN + # open tilt from UI with patch("zigpy.zcl.Cluster.request", return_value=[0x0, zcl_f.Status.SUCCESS]): await hass.services.async_call( COVER_DOMAIN, @@ -229,7 +235,7 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert hass.states.get(entity_id).state == CoverState.OPEN - # set position UI + # set lift position from UI with patch("zigpy.zcl.Cluster.request", return_value=[0x5, zcl_f.Status.SUCCESS]): await hass.services.async_call( COVER_DOMAIN, @@ -261,6 +267,7 @@ async def test_cover(hass: HomeAssistant, 
setup_zha, zigpy_device_mock) -> None: assert hass.states.get(entity_id).state == CoverState.OPEN + # set tilt position from UI with patch("zigpy.zcl.Cluster.request", return_value=[0x5, zcl_f.Status.SUCCESS]): await hass.services.async_call( COVER_DOMAIN, @@ -281,13 +288,13 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert hass.states.get(entity_id).state == CoverState.CLOSING await send_attributes_report( - hass, cluster, {WCAttrs.current_position_lift_percentage.id: 35} + hass, cluster, {WCAttrs.current_position_tilt_percentage.id: 35} ) assert hass.states.get(entity_id).state == CoverState.CLOSING await send_attributes_report( - hass, cluster, {WCAttrs.current_position_lift_percentage.id: 53} + hass, cluster, {WCAttrs.current_position_tilt_percentage.id: 53} ) assert hass.states.get(entity_id).state == CoverState.OPEN @@ -338,7 +345,7 @@ async def test_cover_failures( # load up cover domain cluster = zigpy_device.endpoints[1].window_covering cluster.PLUGGED_ATTR_READS = { - WCAttrs.current_position_tilt_percentage.name: 42, + WCAttrs.current_position_tilt_percentage.name: 100, WCAttrs.window_covering_type.name: WCT.Tilt_blind_tilt_and_lift, } update_attribute_cache(cluster) @@ -355,7 +362,7 @@ async def test_cover_failures( await send_attributes_report(hass, cluster, {0: 0, 8: 100, 1: 1}) assert hass.states.get(entity_id).state == CoverState.CLOSED - # test to see if it opens + # test that it opens await send_attributes_report(hass, cluster, {0: 1, 8: 0, 1: 100}) assert hass.states.get(entity_id).state == CoverState.OPEN diff --git a/tests/components/zwave_js/conftest.py b/tests/components/zwave_js/conftest.py index bcdc0c3ce16..ce7b0e0109e 100644 --- a/tests/components/zwave_js/conftest.py +++ b/tests/components/zwave_js/conftest.py @@ -13,7 +13,9 @@ from zwave_js_server.model.node import Node from zwave_js_server.model.node.data_model import NodeDataType from zwave_js_server.version import VersionInfo +from homeassistant.components.zwave_js import PLATFORMS from homeassistant.components.zwave_js.const import DOMAIN +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.util.json import JsonArrayType @@ -511,18 +513,25 @@ def aeotec_smart_switch_7_state_fixture() -> NodeDataType: @pytest.fixture(name="listen_block") -def mock_listen_block_fixture(): +def mock_listen_block_fixture() -> asyncio.Event: """Mock a listen block.""" return asyncio.Event() +@pytest.fixture(name="listen_result") +def listen_result_fixture() -> asyncio.Future[None]: + """Mock a listen result.""" + return asyncio.Future() + + @pytest.fixture(name="client") def mock_client_fixture( - controller_state, - controller_node_state, - version_state, - log_config_state, - listen_block, + controller_state: dict[str, Any], + controller_node_state: dict[str, Any], + version_state: dict[str, Any], + log_config_state: dict[str, Any], + listen_block: asyncio.Event, + listen_result: asyncio.Future[None], ): """Mock a client.""" with patch( @@ -537,6 +546,7 @@ def mock_client_fixture( async def listen(driver_ready: asyncio.Event) -> None: driver_ready.set() await listen_block.wait() + await listen_result async def disconnect(): client.connected = False @@ -817,18 +827,29 @@ def nortek_thermostat_removed_event_fixture(client) -> Node: @pytest.fixture(name="integration") -async def integration_fixture(hass: HomeAssistant, client) -> MockConfigEntry: +async def integration_fixture( + hass: HomeAssistant, + client: MagicMock, + platforms: 
list[Platform], +) -> MockConfigEntry: """Set up the zwave_js integration.""" entry = MockConfigEntry(domain="zwave_js", data={"url": "ws://test.org"}) entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() + with patch("homeassistant.components.zwave_js.PLATFORMS", platforms): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() client.async_send_command.reset_mock() return entry +@pytest.fixture +def platforms() -> list[Platform]: + """Fixture to specify platforms to test.""" + return PLATFORMS + + @pytest.fixture(name="chain_actuator_zws12") def window_cover_fixture(client, chain_actuator_zws12_state) -> Node: """Mock a window cover node.""" diff --git a/tests/components/zwave_js/fixtures/controller_state.json b/tests/components/zwave_js/fixtures/controller_state.json index d6d9dcacd9e..c3b9de4bdec 100644 --- a/tests/components/zwave_js/fixtures/controller_state.json +++ b/tests/components/zwave_js/fixtures/controller_state.json @@ -23,6 +23,7 @@ ], "sucNodeId": 1, "supportsTimers": false, + "supportsLongRange": true, "isHealNetworkActive": false, "inclusionState": 0, "status": 0 diff --git a/tests/components/zwave_js/test_api.py b/tests/components/zwave_js/test_api.py index 42c5d59d7ad..f0134c7c43c 100644 --- a/tests/components/zwave_js/test_api.py +++ b/tests/components/zwave_js/test_api.py @@ -5,7 +5,7 @@ from http import HTTPStatus from io import BytesIO import json from typing import Any -from unittest.mock import MagicMock, PropertyMock, patch +from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch import pytest from zwave_js_server.const import ( @@ -103,6 +103,12 @@ from tests.typing import ClientSessionGenerator, WebSocketGenerator CONTROLLER_PATCH_PREFIX = "zwave_js_server.model.controller.Controller" +@pytest.fixture +def platforms() -> list[str]: + """Fixture to specify platforms to test.""" + return [] + + def get_device(hass: HomeAssistant, node): """Get device ID for a node.""" dev_reg = dr.async_get(hass) @@ -168,6 +174,7 @@ async def test_network_status( assert result["client"]["server_version"] == "1.0.0" assert not result["client"]["server_logging_enabled"] assert result["controller"]["inclusion_state"] == InclusionState.IDLE + assert result["controller"]["supports_long_range"] # Try API call with device ID device = device_registry.async_get_device( @@ -1094,52 +1101,27 @@ async def test_provision_smart_start_node( client.async_send_command.return_value = {"success": True} - # Test provisioning entry - await ws_client.send_json( - { - ID: 2, - TYPE: "zwave_js/provision_smart_start_node", - ENTRY_ID: entry.entry_id, - PLANNED_PROVISIONING_ENTRY: { - DSK: "test", - SECURITY_CLASSES: [0], - }, - } - ) - - msg = await ws_client.receive_json() - assert msg["success"] - - assert len(client.async_send_command.call_args_list) == 1 - assert client.async_send_command.call_args[0][0] == { - "command": "controller.provision_smart_start_node", - "entry": ProvisioningEntry( - "test", [SecurityClass.S2_UNAUTHENTICATED] - ).to_dict(), + valid_qr_info = { + VERSION: 1, + SECURITY_CLASSES: [0], + DSK: "test", + GENERIC_DEVICE_CLASS: 1, + SPECIFIC_DEVICE_CLASS: 1, + INSTALLER_ICON_TYPE: 1, + MANUFACTURER_ID: 1, + PRODUCT_TYPE: 1, + PRODUCT_ID: 1, + APPLICATION_VERSION: "test", + "name": "test", } - client.async_send_command.reset_mock() - client.async_send_command.return_value = {"success": True} - # Test QR provisioning information await ws_client.send_json( { ID: 3, TYPE: 
"zwave_js/provision_smart_start_node", ENTRY_ID: entry.entry_id, - QR_PROVISIONING_INFORMATION: { - VERSION: 1, - SECURITY_CLASSES: [0], - DSK: "test", - GENERIC_DEVICE_CLASS: 1, - SPECIFIC_DEVICE_CLASS: 1, - INSTALLER_ICON_TYPE: 1, - MANUFACTURER_ID: 1, - PRODUCT_TYPE: 1, - PRODUCT_ID: 1, - APPLICATION_VERSION: "test", - "name": "test", - }, + QR_PROVISIONING_INFORMATION: valid_qr_info, } ) @@ -1170,28 +1152,6 @@ async def test_provision_smart_start_node( client.async_send_command.reset_mock() client.async_send_command.return_value = {"success": True} - # Test QR code string - await ws_client.send_json( - { - ID: 4, - TYPE: "zwave_js/provision_smart_start_node", - ENTRY_ID: entry.entry_id, - QR_CODE_STRING: "90testtesttesttesttesttesttesttesttesttesttesttesttest", - } - ) - - msg = await ws_client.receive_json() - assert msg["success"] - - assert len(client.async_send_command.call_args_list) == 1 - assert client.async_send_command.call_args[0][0] == { - "command": "controller.provision_smart_start_node", - "entry": "90testtesttesttesttesttesttesttesttesttesttesttesttest", - } - - client.async_send_command.reset_mock() - client.async_send_command.return_value = {"success": True} - # Test QR provisioning information with S2 version throws error await ws_client.send_json( { @@ -1242,9 +1202,7 @@ async def test_provision_smart_start_node( ID: 7, TYPE: "zwave_js/provision_smart_start_node", ENTRY_ID: entry.entry_id, - QR_CODE_STRING: ( - "90testtesttesttesttesttesttesttesttesttesttesttesttest" - ), + QR_PROVISIONING_INFORMATION: valid_qr_info, } ) msg = await ws_client.receive_json() @@ -1262,7 +1220,7 @@ async def test_provision_smart_start_node( ID: 8, TYPE: "zwave_js/provision_smart_start_node", ENTRY_ID: entry.entry_id, - QR_CODE_STRING: "90testtesttesttesttesttesttesttesttesttesttesttesttest", + QR_PROVISIONING_INFORMATION: valid_qr_info, } ) msg = await ws_client.receive_json() @@ -5200,6 +5158,242 @@ async def test_get_integration_settings( } +async def test_backup_nvm( + hass: HomeAssistant, + integration, + client, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test the backup NVM websocket command.""" + ws_client = await hass_ws_client(hass) + + # Set up mocks for the controller events + controller = client.driver.controller + + # Test subscription and events + with patch.object( + controller, "async_backup_nvm_raw_base64", return_value="test" + ) as mock_backup: + # Send the subscription request + await ws_client.send_json_auto_id( + { + "type": "zwave_js/backup_nvm", + "entry_id": integration.entry_id, + } + ) + + # Verify the finished event with data first + msg = await ws_client.receive_json() + assert msg["type"] == "event" + assert msg["event"]["event"] == "finished" + assert msg["event"]["data"] == "test" + + # Verify subscription success + msg = await ws_client.receive_json() + assert msg["type"] == "result" + assert msg["success"] is True + + # Simulate progress events + event = Event( + "nvm backup progress", + { + "source": "controller", + "event": "nvm backup progress", + "bytesRead": 25, + "total": 100, + }, + ) + controller.receive_event(event) + msg = await ws_client.receive_json() + assert msg["event"]["event"] == "nvm backup progress" + assert msg["event"]["bytesRead"] == 25 + assert msg["event"]["total"] == 100 + + event = Event( + "nvm backup progress", + { + "source": "controller", + "event": "nvm backup progress", + "bytesRead": 50, + "total": 100, + }, + ) + controller.receive_event(event) + msg = await ws_client.receive_json() + assert 
msg["event"]["event"] == "nvm backup progress" + assert msg["event"]["bytesRead"] == 50 + assert msg["event"]["total"] == 100 + + # Wait for the backup to complete + await hass.async_block_till_done() + + # Verify the backup was called + assert mock_backup.called + + # Test backup failure + with patch.object( + controller, + "async_backup_nvm_raw_base64", + side_effect=FailedCommand("failed_command", "Backup failed"), + ): + # Send the subscription request + await ws_client.send_json_auto_id( + { + "type": "zwave_js/backup_nvm", + "entry_id": integration.entry_id, + } + ) + + # Verify error response + msg = await ws_client.receive_json() + assert not msg["success"] + assert msg["error"]["code"] == "Backup failed" + + # Test config entry not found + await ws_client.send_json_auto_id( + { + "type": "zwave_js/backup_nvm", + "entry_id": "invalid_entry_id", + } + ) + msg = await ws_client.receive_json() + assert not msg["success"] + assert msg["error"]["code"] == "not_found" + + # Test config entry not loaded + await hass.config_entries.async_unload(integration.entry_id) + await hass.async_block_till_done() + + await ws_client.send_json_auto_id( + { + "type": "zwave_js/backup_nvm", + "entry_id": integration.entry_id, + } + ) + msg = await ws_client.receive_json() + assert msg["error"]["code"] == "not_loaded" + + +async def test_restore_nvm( + hass: HomeAssistant, + integration, + client, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test the restore NVM websocket command.""" + ws_client = await hass_ws_client(hass) + + # Set up mocks for the controller events + controller = client.driver.controller + + # Test restore success + with patch.object( + controller, "async_restore_nvm_base64", return_value=None + ) as mock_restore: + # Send the subscription request + await ws_client.send_json_auto_id( + { + "type": "zwave_js/restore_nvm", + "entry_id": integration.entry_id, + "data": "dGVzdA==", # base64 encoded "test" + } + ) + + # Verify the finished event first + msg = await ws_client.receive_json() + assert msg["type"] == "event" + assert msg["event"]["event"] == "finished" + + # Verify subscription success + msg = await ws_client.receive_json() + assert msg["type"] == "result" + assert msg["success"] is True + + # Simulate progress events + event = Event( + "nvm restore progress", + { + "source": "controller", + "event": "nvm restore progress", + "bytesWritten": 25, + "total": 100, + }, + ) + controller.receive_event(event) + msg = await ws_client.receive_json() + assert msg["event"]["event"] == "nvm restore progress" + assert msg["event"]["bytesWritten"] == 25 + assert msg["event"]["total"] == 100 + + event = Event( + "nvm restore progress", + { + "source": "controller", + "event": "nvm restore progress", + "bytesWritten": 50, + "total": 100, + }, + ) + controller.receive_event(event) + msg = await ws_client.receive_json() + assert msg["event"]["event"] == "nvm restore progress" + assert msg["event"]["bytesWritten"] == 50 + assert msg["event"]["total"] == 100 + + # Wait for the restore to complete + await hass.async_block_till_done() + + # Verify the restore was called + assert mock_restore.called + + # Test restore failure + with patch.object( + controller, + "async_restore_nvm_base64", + side_effect=FailedCommand("failed_command", "Restore failed"), + ): + # Send the subscription request + await ws_client.send_json_auto_id( + { + "type": "zwave_js/restore_nvm", + "entry_id": integration.entry_id, + "data": "dGVzdA==", # base64 encoded "test" + } + ) + + # Verify error response + msg = 
await ws_client.receive_json() + assert not msg["success"] + assert msg["error"]["code"] == "Restore failed" + + # Test entry_id not found + await ws_client.send_json_auto_id( + { + "type": "zwave_js/restore_nvm", + "entry_id": "invalid_entry_id", + "data": "dGVzdA==", # base64 encoded "test" + } + ) + msg = await ws_client.receive_json() + assert not msg["success"] + assert msg["error"]["code"] == "not_found" + + # Test config entry not loaded + await hass.config_entries.async_unload(integration.entry_id) + await hass.async_block_till_done() + + await ws_client.send_json_auto_id( + { + "type": "zwave_js/restore_nvm", + "entry_id": integration.entry_id, + "data": "dGVzdA==", # base64 encoded "test" + } + ) + msg = await ws_client.receive_json() + assert not msg["success"] + assert msg["error"]["code"] == "not_loaded" + + async def test_cancel_secure_bootstrap_s2( hass: HomeAssistant, client, integration, hass_ws_client: WebSocketGenerator ) -> None: @@ -5340,3 +5534,127 @@ async def test_subscribe_s2_inclusion( msg = await ws_client.receive_json() assert not msg["success"] assert msg["error"]["code"] == ERR_NOT_FOUND + + +async def test_lookup_device( + hass: HomeAssistant, + integration: MockConfigEntry, + client: MagicMock, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test lookup_device websocket command.""" + entry = integration + ws_client = await hass_ws_client(hass) + + # Create mock device response + mock_device = MagicMock() + mock_device.to_dict.return_value = { + "manufacturer": "Test Manufacturer", + "label": "Test Device", + "description": "Test Device Description", + "devices": [{"productType": 1, "productId": 2}], + "firmwareVersion": {"min": "1.0", "max": "2.0"}, + } + + # Test successful lookup + client.driver.config_manager.lookup_device = AsyncMock(return_value=mock_device) + + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/lookup_device", + ENTRY_ID: entry.entry_id, + MANUFACTURER_ID: 1, + PRODUCT_TYPE: 2, + PRODUCT_ID: 3, + APPLICATION_VERSION: "1.5", + } + ) + msg = await ws_client.receive_json() + + assert msg["success"] + assert msg["result"] == mock_device.to_dict.return_value + + client.driver.config_manager.lookup_device.assert_called_once_with(1, 2, 3, "1.5") + + # Reset mock + client.driver.config_manager.lookup_device.reset_mock() + + # Test lookup without optional application_version + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/lookup_device", + ENTRY_ID: entry.entry_id, + MANUFACTURER_ID: 4, + PRODUCT_TYPE: 5, + PRODUCT_ID: 6, + } + ) + msg = await ws_client.receive_json() + + assert msg["success"] + assert msg["result"] == mock_device.to_dict.return_value + + client.driver.config_manager.lookup_device.assert_called_once_with(4, 5, 6, None) + + # Test device not found + with patch.object( + client.driver.config_manager, + "lookup_device", + return_value=None, + ): + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/lookup_device", + ENTRY_ID: entry.entry_id, + MANUFACTURER_ID: 99, + PRODUCT_TYPE: 99, + PRODUCT_ID: 99, + APPLICATION_VERSION: "9.9", + } + ) + msg = await ws_client.receive_json() + + assert not msg["success"] + assert msg["error"]["code"] == ERR_NOT_FOUND + assert msg["error"]["message"] == "Device not found" + + # Test sending command with improper entry ID fails + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/lookup_device", + ENTRY_ID: "invalid_entry_id", + MANUFACTURER_ID: 1, + PRODUCT_TYPE: 1, + PRODUCT_ID: 1, + APPLICATION_VERSION: "1.0", + } + ) + msg = await ws_client.receive_json() + 
assert not msg["success"] + assert msg["error"]["code"] == ERR_NOT_FOUND + assert msg["error"]["message"] == "Config entry invalid_entry_id not found" + + # Test FailedCommand exception + error_message = "Failed to execute lookup_device command" + with patch.object( + client.driver.config_manager, + "lookup_device", + side_effect=FailedCommand("lookup_device", error_message), + ): + # Send the subscription request + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/lookup_device", + ENTRY_ID: entry.entry_id, + MANUFACTURER_ID: 1, + PRODUCT_TYPE: 2, + PRODUCT_ID: 3, + APPLICATION_VERSION: "1.0", + } + ) + + # Verify error response + msg = await ws_client.receive_json() + assert not msg["success"] + assert msg["error"]["code"] == error_message + assert msg["error"]["message"] == f"Command failed: {error_message}" diff --git a/tests/components/zwave_js/test_binary_sensor.py b/tests/components/zwave_js/test_binary_sensor.py index 0054439ef1d..657dd337bf9 100644 --- a/tests/components/zwave_js/test_binary_sensor.py +++ b/tests/components/zwave_js/test_binary_sensor.py @@ -1,5 +1,6 @@ """Test the Z-Wave JS binary sensor platform.""" +import pytest from zwave_js_server.event import Event from zwave_js_server.model.node import Node @@ -10,6 +11,7 @@ from homeassistant.const import ( STATE_ON, STATE_UNKNOWN, EntityCategory, + Platform, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -26,6 +28,12 @@ from .common import ( from tests.common import MockConfigEntry +@pytest.fixture +def platforms() -> list[str]: + """Fixture to specify platforms to test.""" + return [Platform.BINARY_SENSOR] + + async def test_low_battery_sensor( hass: HomeAssistant, entity_registry: er.EntityRegistry, multisensor_6, integration ) -> None: diff --git a/tests/components/zwave_js/test_button.py b/tests/components/zwave_js/test_button.py index b0c06668926..0282a268b54 100644 --- a/tests/components/zwave_js/test_button.py +++ b/tests/components/zwave_js/test_button.py @@ -5,11 +5,17 @@ import pytest from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.components.zwave_js.const import DOMAIN, SERVICE_REFRESH_VALUE from homeassistant.components.zwave_js.helpers import get_valueless_base_unique_id -from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +@pytest.fixture +def platforms() -> list[str]: + """Fixture to specify platforms to test.""" + return [Platform.BUTTON] + + async def test_ping_entity( hass: HomeAssistant, entity_registry: er.EntityRegistry, diff --git a/tests/components/zwave_js/test_climate.py b/tests/components/zwave_js/test_climate.py index 5d711528a28..f312284d897 100644 --- a/tests/components/zwave_js/test_climate.py +++ b/tests/components/zwave_js/test_climate.py @@ -42,6 +42,7 @@ from homeassistant.const import ( ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, + Platform, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError @@ -56,6 +57,12 @@ from .common import ( ) +@pytest.fixture +def platforms() -> list[str]: + """Fixture to specify platforms to test.""" + return [Platform.CLIMATE] + + async def test_thermostat_v2( hass: HomeAssistant, client, diff --git a/tests/components/zwave_js/test_cover.py b/tests/components/zwave_js/test_cover.py index b13d4f9787f..13f519725fd 100644 --- 
a/tests/components/zwave_js/test_cover.py +++ b/tests/components/zwave_js/test_cover.py @@ -2,6 +2,7 @@ import logging +import pytest from zwave_js_server.const import ( CURRENT_STATE_PROPERTY, CURRENT_VALUE_PROPERTY, @@ -35,6 +36,7 @@ from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, STATE_UNKNOWN, + Platform, ) from homeassistant.core import HomeAssistant @@ -50,6 +52,12 @@ FIBARO_FGR_223_SHUTTER_COVER_ENTITY = "cover.fgr_223_test_cover" LOGGER.setLevel(logging.DEBUG) +@pytest.fixture +def platforms() -> list[str]: + """Fixture to specify platforms to test.""" + return [Platform.COVER] + + async def test_window_cover( hass: HomeAssistant, client, chain_actuator_zws12, integration ) -> None: diff --git a/tests/components/zwave_js/test_event.py b/tests/components/zwave_js/test_event.py index 1db02662f4e..84b1ade2632 100644 --- a/tests/components/zwave_js/test_event.py +++ b/tests/components/zwave_js/test_event.py @@ -3,11 +3,12 @@ from datetime import timedelta from freezegun import freeze_time +import pytest from zwave_js_server.event import Event from homeassistant.components.event import ATTR_EVENT_TYPE from homeassistant.components.zwave_js.const import ATTR_VALUE -from homeassistant.const import STATE_UNKNOWN +from homeassistant.const import STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant from homeassistant.util import dt as dt_util @@ -15,6 +16,12 @@ BASIC_EVENT_VALUE_ENTITY = "event.honeywell_in_wall_smart_fan_control_event_valu CENTRAL_SCENE_ENTITY = "event.node_51_scene_002" +@pytest.fixture +def platforms() -> list[str]: + """Fixture to specify platforms to test.""" + return [Platform.EVENT] + + async def test_basic( hass: HomeAssistant, client, fan_honeywell_39358, integration ) -> None: diff --git a/tests/components/zwave_js/test_events.py b/tests/components/zwave_js/test_events.py index 0bb6376a02b..8cdaef3e63d 100644 --- a/tests/components/zwave_js/test_events.py +++ b/tests/components/zwave_js/test_events.py @@ -6,11 +6,18 @@ import pytest from zwave_js_server.const import CommandClass from zwave_js_server.event import Event +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from tests.common import async_capture_events +@pytest.fixture +def platforms() -> list[str]: + """Fixture to specify platforms to test.""" + return [] + + async def test_scenes( hass: HomeAssistant, hank_binary_switch, integration, client ) -> None: @@ -244,6 +251,7 @@ async def test_notifications( assert events[2].data["command_class_name"] == "Multilevel Switch" +@pytest.mark.parametrize("platforms", [[Platform.SWITCH]]) async def test_value_updated( hass: HomeAssistant, vision_security_zl7432, integration, client ) -> None: diff --git a/tests/components/zwave_js/test_init.py b/tests/components/zwave_js/test_init.py index c575066b57c..91e333f7c7d 100644 --- a/tests/components/zwave_js/test_init.py +++ b/tests/components/zwave_js/test_init.py @@ -3,14 +3,19 @@ import asyncio from copy import deepcopy import logging -from unittest.mock import AsyncMock, call, patch +from typing import Any +from unittest.mock import AsyncMock, MagicMock, call, patch from aiohasupervisor import SupervisorError from aiohasupervisor.models import AddonsOptions import pytest from zwave_js_server.client import Client from zwave_js_server.event import Event -from zwave_js_server.exceptions import BaseZwaveJSServerError, InvalidServerVersion +from zwave_js_server.exceptions import ( + BaseZwaveJSServerError, + InvalidServerVersion, + NotConnected, 
+) from zwave_js_server.model.node import Node from zwave_js_server.model.version import VersionInfo @@ -21,7 +26,7 @@ from homeassistant.components.zwave_js import DOMAIN from homeassistant.components.zwave_js.helpers import get_device_id from homeassistant.config_entries import ConfigEntryDisabler, ConfigEntryState from homeassistant.const import STATE_UNAVAILABLE -from homeassistant.core import HomeAssistant +from homeassistant.core import CoreState, HomeAssistant from homeassistant.helpers import ( area_registry as ar, device_registry as dr, @@ -32,7 +37,11 @@ from homeassistant.setup import async_setup_component from .common import AIR_TEMPERATURE_SENSOR, EATON_RF9640_ENTITY -from tests.common import MockConfigEntry, async_get_persistent_notifications +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + async_get_persistent_notifications, +) from tests.typing import WebSocketGenerator @@ -127,24 +136,215 @@ async def test_noop_statistics(hass: HomeAssistant, client) -> None: assert not mock_cmd2.called -@pytest.mark.parametrize("error", [BaseZwaveJSServerError("Boom"), Exception("Boom")]) -async def test_listen_failure(hass: HomeAssistant, client, error) -> None: - """Test we handle errors during client listen.""" +async def test_driver_ready_timeout_during_setup( + hass: HomeAssistant, + client: MagicMock, + listen_block: asyncio.Event, +) -> None: + """Test we handle driver ready timeout during setup.""" - async def listen(driver_ready): - """Mock the client listen method.""" - # Set the connect side effect to stop an endless loop on reload. - client.connect.side_effect = BaseZwaveJSServerError("Boom") - raise error + async def listen(driver_ready: asyncio.Event) -> None: + """Mock listen.""" + await listen_block.wait() client.listen.side_effect = listen + + entry = MockConfigEntry( + domain="zwave_js", + data={"url": "ws://test.org", "data_collection_opted_in": True}, + ) + entry.add_to_hass(hass) + assert client.disconnect.call_count == 0 + + with patch("homeassistant.components.zwave_js.DRIVER_READY_TIMEOUT", new=0): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.state is ConfigEntryState.SETUP_RETRY + assert client.disconnect.call_count == 1 + + +@pytest.mark.parametrize("core_state", [CoreState.running, CoreState.stopping]) +@pytest.mark.parametrize( + ("listen_future_result_method", "listen_future_result"), + [ + ("set_exception", BaseZwaveJSServerError("Boom")), + ("set_exception", Exception("Boom")), + ("set_result", None), + ], +) +async def test_listen_done_during_setup_before_forward_entry( + hass: HomeAssistant, + client: MagicMock, + listen_block: asyncio.Event, + listen_result: asyncio.Future[None], + core_state: CoreState, + listen_future_result_method: str, + listen_future_result: Exception | None, +) -> None: + """Test listen task finishing during setup before forward entry.""" + assert hass.state is CoreState.running + + async def listen(driver_ready: asyncio.Event) -> None: + await listen_block.wait() + await listen_result + async_fire_time_changed(hass, fire_all=True) + + client.listen.side_effect = listen + hass.set_state(core_state) + listen_block.set() + getattr(listen_result, listen_future_result_method)(listen_future_result) + entry = MockConfigEntry(domain="zwave_js", data={"url": "ws://test.org"}) entry.add_to_hass(hass) + assert client.disconnect.call_count == 0 await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() assert entry.state is 
ConfigEntryState.SETUP_RETRY + assert client.disconnect.call_count == 1 + + +async def test_not_connected_during_setup_after_forward_entry( + hass: HomeAssistant, + client: MagicMock, + listen_block: asyncio.Event, + listen_result: asyncio.Future[None], +) -> None: + """Test we handle not connected client during setup after forward entry.""" + + async def send_command_side_effect(*args: Any, **kwargs: Any) -> None: + """Mock send command.""" + listen_block.set() + listen_result.set_result(None) + # Yield to allow the listen task to run + await asyncio.sleep(0) + raise NotConnected("Boom") + + async def listen(driver_ready: asyncio.Event) -> None: + """Mock listen.""" + driver_ready.set() + client.async_send_command.side_effect = send_command_side_effect + await listen_block.wait() + await listen_result + + client.listen.side_effect = listen + + entry = MockConfigEntry( + domain="zwave_js", + data={"url": "ws://test.org", "data_collection_opted_in": True}, + ) + entry.add_to_hass(hass) + assert client.disconnect.call_count == 0 + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.state is ConfigEntryState.SETUP_RETRY + assert client.disconnect.call_count == 1 + + +@pytest.mark.parametrize("core_state", [CoreState.running, CoreState.stopping]) +@pytest.mark.parametrize( + ("listen_future_result_method", "listen_future_result"), + [ + ("set_exception", BaseZwaveJSServerError("Boom")), + ("set_exception", Exception("Boom")), + ("set_result", None), + ], +) +async def test_listen_done_during_setup_after_forward_entry( + hass: HomeAssistant, + client: MagicMock, + listen_block: asyncio.Event, + listen_result: asyncio.Future[None], + core_state: CoreState, + listen_future_result_method: str, + listen_future_result: Exception | None, +) -> None: + """Test listen task finishing during setup after forward entry.""" + assert hass.state is CoreState.running + + async def send_command_side_effect(*args: Any, **kwargs: Any) -> None: + """Mock send command.""" + listen_block.set() + getattr(listen_result, listen_future_result_method)(listen_future_result) + # Yield to allow the listen task to run + await asyncio.sleep(0) + + async def listen(driver_ready: asyncio.Event) -> None: + """Mock listen.""" + driver_ready.set() + client.async_send_command.side_effect = send_command_side_effect + await listen_block.wait() + await listen_result + + client.listen.side_effect = listen + hass.set_state(core_state) + + entry = MockConfigEntry( + domain="zwave_js", + data={"url": "ws://test.org", "data_collection_opted_in": True}, + ) + entry.add_to_hass(hass) + assert client.disconnect.call_count == 0 + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.state is ConfigEntryState.SETUP_RETRY + assert client.disconnect.call_count == 1 + + +@pytest.mark.parametrize( + ("core_state", "final_config_entry_state", "disconnect_call_count"), + [ + ( + CoreState.running, + ConfigEntryState.SETUP_RETRY, + 2, + ), # the reload will cause a disconnect call too + ( + CoreState.stopping, + ConfigEntryState.LOADED, + 0, + ), # the home assistant stop event will handle the disconnect + ], +) +@pytest.mark.parametrize( + ("listen_future_result_method", "listen_future_result"), + [ + ("set_exception", BaseZwaveJSServerError("Boom")), + ("set_exception", Exception("Boom")), + ("set_result", None), + ], +) +async def test_listen_done_after_setup( + hass: HomeAssistant, + client: MagicMock, + integration: MockConfigEntry, + 
listen_block: asyncio.Event, + listen_result: asyncio.Future[None], + core_state: CoreState, + listen_future_result_method: str, + listen_future_result: Exception | None, + final_config_entry_state: ConfigEntryState, + disconnect_call_count: int, +) -> None: + """Test listen task finishing after setup.""" + config_entry = integration + assert config_entry.state is ConfigEntryState.LOADED + assert hass.state is CoreState.running + assert client.disconnect.call_count == 0 + + hass.set_state(core_state) + listen_block.set() + getattr(listen_result, listen_future_result_method)(listen_future_result) + await hass.async_block_till_done() + + assert config_entry.state is final_config_entry_state + assert client.disconnect.call_count == disconnect_call_count async def test_new_entity_on_value_added( diff --git a/tests/components/zwave_js/test_siren.py b/tests/components/zwave_js/test_siren.py index 4eb872954d1..d932338f9dc 100644 --- a/tests/components/zwave_js/test_siren.py +++ b/tests/components/zwave_js/test_siren.py @@ -1,5 +1,6 @@ """Test the Z-Wave JS siren platform.""" +import pytest from zwave_js_server.event import Event from homeassistant.components.siren import ( @@ -7,7 +8,7 @@ from homeassistant.components.siren import ( ATTR_TONE, ATTR_VOLUME_LEVEL, ) -from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNKNOWN +from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant SIREN_ENTITY = "siren.indoor_siren_6_play_tone_2" @@ -64,6 +65,12 @@ TONE_ID_VALUE_ID = { } +@pytest.fixture +def platforms() -> list[str]: + """Fixture to specify platforms to test.""" + return [Platform.SIREN] + + async def test_siren( hass: HomeAssistant, client, aeotec_zw164_siren, integration ) -> None: diff --git a/tests/components/zwave_js/test_update.py b/tests/components/zwave_js/test_update.py index d6683fa24cb..6a4f48a0dc5 100644 --- a/tests/components/zwave_js/test_update.py +++ b/tests/components/zwave_js/test_update.py @@ -658,8 +658,10 @@ async def test_update_entity_delay( assert len(client.async_send_command.call_args_list) == 2 - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=5)) - await hass.async_block_till_done(wait_background_tasks=True) + update_interval = timedelta(minutes=5) + freezer.tick(update_interval) + async_fire_time_changed(hass) + await hass.async_block_till_done() nodes: set[int] = set() @@ -668,8 +670,9 @@ async def test_update_entity_delay( assert args["command"] == "controller.get_available_firmware_updates" nodes.add(args["nodeId"]) - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=10)) - await hass.async_block_till_done(wait_background_tasks=True) + freezer.tick(update_interval) + async_fire_time_changed(hass) + await hass.async_block_till_done() assert len(client.async_send_command.call_args_list) == 4 args = client.async_send_command.call_args_list[3][0][0] diff --git a/tests/conftest.py b/tests/conftest.py index 7725189aa53..65e3518956e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -67,7 +67,12 @@ from homeassistant.components.websocket_api.auth import ( # pylint: disable-next=hass-component-root-import from homeassistant.components.websocket_api.http import URL from homeassistant.config import YAML_CONFIG_FILE -from homeassistant.config_entries import ConfigEntries, ConfigEntry, ConfigEntryState +from homeassistant.config_entries import ( + ConfigEntries, + ConfigEntry, + ConfigEntryState, + ConfigSubentryData, +) from homeassistant.const import 
BASE_PLATFORMS, HASSIO_USER_NAME from homeassistant.core import ( Context, @@ -946,6 +951,12 @@ def mqtt_config_entry_data() -> dict[str, Any] | None: return None +@pytest.fixture +def mqtt_config_subentries_data() -> tuple[ConfigSubentryData] | None: + """Fixture to allow overriding MQTT subentries data.""" + return None + + @pytest.fixture def mqtt_config_entry_options() -> dict[str, Any] | None: """Fixture to allow overriding MQTT entry options.""" @@ -1032,6 +1043,7 @@ async def mqtt_mock( mqtt_client_mock: MqttMockPahoClient, mqtt_config_entry_data: dict[str, Any] | None, mqtt_config_entry_options: dict[str, Any] | None, + mqtt_config_subentries_data: tuple[ConfigSubentryData] | None, mqtt_mock_entry: MqttMockHAClientGenerator, ) -> AsyncGenerator[MqttMockHAClient]: """Fixture to mock MQTT component.""" @@ -1044,6 +1056,7 @@ async def _mqtt_mock_entry( mqtt_client_mock: MqttMockPahoClient, mqtt_config_entry_data: dict[str, Any] | None, mqtt_config_entry_options: dict[str, Any] | None, + mqtt_config_subentries_data: tuple[ConfigSubentryData] | None, ) -> AsyncGenerator[MqttMockHAClientGenerator]: """Fixture to mock a delayed setup of the MQTT config entry.""" # Local import to avoid processing MQTT modules when running a testcase @@ -1060,6 +1073,7 @@ async def _mqtt_mock_entry( entry = MockConfigEntry( data=mqtt_config_entry_data, options=mqtt_config_entry_options, + subentries_data=mqtt_config_subentries_data, domain=mqtt.DOMAIN, title="MQTT", version=1, @@ -1174,6 +1188,7 @@ async def mqtt_mock_entry( mqtt_client_mock: MqttMockPahoClient, mqtt_config_entry_data: dict[str, Any] | None, mqtt_config_entry_options: dict[str, Any] | None, + mqtt_config_subentries_data: tuple[ConfigSubentryData] | None, ) -> AsyncGenerator[MqttMockHAClientGenerator]: """Set up an MQTT config entry.""" @@ -1190,7 +1205,11 @@ async def mqtt_mock_entry( return await mqtt_mock_entry(_async_setup_config_entry) async with _mqtt_mock_entry( - hass, mqtt_client_mock, mqtt_config_entry_data, mqtt_config_entry_options + hass, + mqtt_client_mock, + mqtt_config_entry_data, + mqtt_config_entry_options, + mqtt_config_subentries_data, ) as mqtt_mock_entry: yield _setup_mqtt_entry diff --git a/tests/helpers/test_config_entry_oauth2_flow.py b/tests/helpers/test_config_entry_oauth2_flow.py index 0fc6b582bb5..5d16a9a62fd 100644 --- a/tests/helpers/test_config_entry_oauth2_flow.py +++ b/tests/helpers/test_config_entry_oauth2_flow.py @@ -1,11 +1,11 @@ """Tests for the Somfy config flow.""" -from collections.abc import Generator +from collections.abc import AsyncGenerator, Generator from http import HTTPStatus import logging import time from typing import Any -from unittest.mock import patch +from unittest.mock import AsyncMock, patch import aiohttp import pytest @@ -15,7 +15,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import config_entry_oauth2_flow from homeassistant.helpers.network import NoURLAvailableError -from tests.common import MockConfigEntry, mock_platform +from tests.common import MockConfigEntry, MockModule, mock_integration, mock_platform from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import ClientSessionGenerator @@ -27,6 +27,11 @@ ACCESS_TOKEN_1 = "mock-access-token-1" ACCESS_TOKEN_2 = "mock-access-token-2" AUTHORIZE_URL = "https://example.como/auth/authorize" TOKEN_URL = "https://example.como/auth/token" +MOCK_SECRET_TOKEN_URLSAFE = ( + "token-" + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + 
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +) @pytest.fixture @@ -40,6 +45,22 @@ async def local_impl( ) +@pytest.fixture +async def local_impl_pkce( + hass: HomeAssistant, +) -> AsyncGenerator[config_entry_oauth2_flow.LocalOAuth2ImplementationWithPkce]: + """Local implementation.""" + assert await setup.async_setup_component(hass, "auth", {}) + with patch( + "homeassistant.helpers.config_entry_oauth2_flow.secrets.token_urlsafe", + return_value=MOCK_SECRET_TOKEN_URLSAFE + + "bbbbbb", # Add some characters that should be removed by the logic. + ): + yield config_entry_oauth2_flow.LocalOAuth2ImplementationWithPkce( + hass, TEST_DOMAIN, CLIENT_ID, AUTHORIZE_URL, TOKEN_URL + ) + + @pytest.fixture def flow_handler( hass: HomeAssistant, @@ -963,3 +984,143 @@ async def test_oauth2_without_secret_init( client = await hass_client_no_auth() resp = await client.get("/auth/external/callback?code=abcd&state=qwer") assert resp.status == 400 + + +@pytest.mark.usefixtures("current_request_with_host") +async def test_abort_oauth_with_pkce_rejected( + hass: HomeAssistant, + flow_handler: type[config_entry_oauth2_flow.AbstractOAuth2FlowHandler], + local_impl_pkce: config_entry_oauth2_flow.LocalOAuth2ImplementationWithPkce, + hass_client_no_auth: ClientSessionGenerator, +) -> None: + """Check bad oauth token.""" + flow_handler.async_register_implementation(hass, local_impl_pkce) + + result = await hass.config_entries.flow.async_init( + TEST_DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": "https://example.com/auth/external/callback", + }, + ) + + code_challenge = local_impl_pkce.compute_code_challenge(MOCK_SECRET_TOKEN_URLSAFE) + assert result["type"] == data_entry_flow.FlowResultType.EXTERNAL_STEP + + assert result["url"].startswith(f"{AUTHORIZE_URL}?") + assert f"client_id={CLIENT_ID}" in result["url"] + assert "redirect_uri=https://example.com/auth/external/callback" in result["url"] + assert f"state={state}" in result["url"] + assert "scope=read+write" in result["url"] + assert "response_type=code" in result["url"] + assert f"code_challenge={code_challenge}" in result["url"] + assert "code_challenge_method=S256" in result["url"] + + client = await hass_client_no_auth() + resp = await client.get( + f"/auth/external/callback?error=access_denied&state={state}" + ) + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert result["type"] == data_entry_flow.FlowResultType.ABORT + assert result["reason"] == "user_rejected_authorize" + assert result["description_placeholders"] == {"error": "access_denied"} + + +@pytest.mark.usefixtures("current_request_with_host") +async def test_oauth_with_pkce_adds_code_verifier_to_token_resolve( + hass: HomeAssistant, + flow_handler: type[config_entry_oauth2_flow.AbstractOAuth2FlowHandler], + local_impl_pkce: config_entry_oauth2_flow.LocalOAuth2ImplementationWithPkce, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, +) -> None: + """Check pkce flow.""" + + mock_integration( + hass, + MockModule( + domain=TEST_DOMAIN, + async_setup_entry=AsyncMock(return_value=True), + ), + ) + mock_platform(hass, f"{TEST_DOMAIN}.config_flow", None) + flow_handler.async_register_implementation(hass, local_impl_pkce) + + result = await hass.config_entries.flow.async_init( + TEST_DOMAIN, 
context={"source": config_entries.SOURCE_USER} + ) + + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": "https://example.com/auth/external/callback", + }, + ) + + code_challenge = local_impl_pkce.compute_code_challenge(MOCK_SECRET_TOKEN_URLSAFE) + assert result["type"] == data_entry_flow.FlowResultType.EXTERNAL_STEP + + assert result["url"].startswith(f"{AUTHORIZE_URL}?") + assert f"client_id={CLIENT_ID}" in result["url"] + assert "redirect_uri=https://example.com/auth/external/callback" in result["url"] + assert f"state={state}" in result["url"] + assert "scope=read+write" in result["url"] + assert "response_type=code" in result["url"] + assert f"code_challenge={code_challenge}" in result["url"] + assert "code_challenge_method=S256" in result["url"] + + # Setup the response when HA tries to fetch a token with the code + aioclient_mock.post( + TOKEN_URL, + json={ + "refresh_token": REFRESH_TOKEN, + "access_token": ACCESS_TOKEN_1, + "type": "bearer", + "expires_in": 60, + }, + ) + + client = await hass_client_no_auth() + # trigger the callback + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + # Verify the token resolve request occurred + assert len(aioclient_mock.mock_calls) == 1 + assert aioclient_mock.mock_calls[0][2] == { + "client_id": CLIENT_ID, + "grant_type": "authorization_code", + "code": "abcd", + "redirect_uri": "https://example.com/auth/external/callback", + "code_verifier": MOCK_SECRET_TOKEN_URLSAFE, + } + + +@pytest.mark.parametrize("code_verifier_length", [40, 129]) +def test_generate_code_verifier_invalid_length(code_verifier_length: int) -> None: + """Test generate_code_verifier with an invalid length.""" + with pytest.raises(ValueError): + config_entry_oauth2_flow.LocalOAuth2ImplementationWithPkce.generate_code_verifier( + code_verifier_length + ) + + +@pytest.mark.parametrize("code_verifier", ["", "yyy", "a" * 129]) +def test_compute_code_challenge_invalid_code_verifier(code_verifier: str) -> None: + """Test compute_code_challenge with an invalid code_verifier.""" + with pytest.raises(ValueError): + config_entry_oauth2_flow.LocalOAuth2ImplementationWithPkce.compute_code_challenge( + code_verifier + ) diff --git a/tests/helpers/test_llm.py b/tests/helpers/test_llm.py index 630ed3f4fa1..19ada407550 100644 --- a/tests/helpers/test_llm.py +++ b/tests/helpers/test_llm.py @@ -181,19 +181,19 @@ async def test_assist_api( assert len(llm.async_get_apis(hass)) == 1 api = await llm.async_get_api(hass, "assist", llm_context) - assert len(api.tools) == 0 + assert [tool.name for tool in api.tools] == ["get_home_state"] # Match all intent_handler.platforms = None api = await llm.async_get_api(hass, "assist", llm_context) - assert len(api.tools) == 1 + assert [tool.name for tool in api.tools] == ["test_intent", "get_home_state"] # Match specific domain intent_handler.platforms = {"light"} api = await llm.async_get_api(hass, "assist", llm_context) - assert len(api.tools) == 1 + assert len(api.tools) == 2 tool = api.tools[0] assert tool.name == "test_intent" assert tool.description == "Execute Home Assistant test_intent intent" @@ -622,6 +622,40 @@ async def test_assist_api_prompt( domain: light state: unavailable areas: Test Area 2 +""" + stateless_exposed_entities_prompt = """An overview of the areas and the devices in this 
smart home: +- names: Kitchen + domain: light +- names: Living Room + domain: light + areas: Test Area, Alternative name +- names: Test Device, my test light + domain: light + areas: Test Area, Alternative name +- names: Test Service + domain: light + areas: Test Area, Alternative name +- names: Test Service + domain: light + areas: Test Area, Alternative name +- names: Test Service + domain: light + areas: Test Area, Alternative name +- names: Test Device 2 + domain: light + areas: Test Area 2 +- names: Test Device 3 + domain: light + areas: Test Area 2 +- names: Test Device 4 + domain: light + areas: Test Area 2 +- names: Unnamed Device + domain: light + areas: Test Area 2 +- names: '1' + domain: light + areas: Test Area 2 """ first_part_prompt = ( "When controlling Home Assistant always call the intent tools. " @@ -640,9 +674,18 @@ async def test_assist_api_prompt( f"""{first_part_prompt} {area_prompt} {no_timer_prompt} -{exposed_entities_prompt}""" +{stateless_exposed_entities_prompt}""" ) + # Verify that the get_home_state tool returns the same results as the exposed_entities_prompt + result = await api.async_call_tool( + llm.ToolInput(tool_name="get_home_state", tool_args={}) + ) + assert result == { + "success": True, + "result": exposed_entities_prompt, + } + # Fake that request is made from a specific device ID with an area llm_context.device_id = device.id area_prompt = ( @@ -654,7 +697,7 @@ async def test_assist_api_prompt( f"""{first_part_prompt} {area_prompt} {no_timer_prompt} -{exposed_entities_prompt}""" +{stateless_exposed_entities_prompt}""" ) # Add floor @@ -669,7 +712,7 @@ async def test_assist_api_prompt( f"""{first_part_prompt} {area_prompt} {no_timer_prompt} -{exposed_entities_prompt}""" +{stateless_exposed_entities_prompt}""" ) # Register device for timers @@ -680,7 +723,7 @@ async def test_assist_api_prompt( assert api.api_prompt == ( f"""{first_part_prompt} {area_prompt} -{exposed_entities_prompt}""" +{stateless_exposed_entities_prompt}""" ) @@ -1267,3 +1310,19 @@ async def test_calendar_get_events_tool(hass: HomeAssistant) -> None: "start_date_time": now, "end_date_time": dt_util.start_of_local_day() + timedelta(days=7), } + + +async def test_no_tools_exposed(hass: HomeAssistant) -> None: + """Test that tools are not exposed when no entities are exposed.""" + assert await async_setup_component(hass, "homeassistant", {}) + context = Context() + llm_context = llm.LLMContext( + platform="test_platform", + context=context, + user_prompt="test_text", + language="*", + assistant="conversation", + device_id=None, + ) + api = await llm.async_get_api(hass, "assist", llm_context) + assert api.tools == [] diff --git a/tests/helpers/test_script.py b/tests/helpers/test_script.py index df589a41daa..f8552fcefed 100644 --- a/tests/helpers/test_script.py +++ b/tests/helpers/test_script.py @@ -494,7 +494,7 @@ async def test_calling_service_response_data_in_scopes(hass: HomeAssistant) -> N assert result.variables["my_response"] == expected_var expected_trace = { - "0": [{"variables": {"my_response": expected_var}}], + "0": [{"variables": {"my_response": expected_var, "state": "off"}}], "0/parallel/0/sequence/0": [{"variables": {"state": "off"}}], "0/parallel/0/sequence/1": [ { @@ -1797,7 +1797,7 @@ async def test_wait_in_sequence(hass: HomeAssistant) -> None: assert result.variables["wait"] == expected_var expected_trace = { - "0": [{"variables": {"wait": expected_var}}], + "0": [{"variables": {"wait": expected_var, "state": "off"}}], "0/sequence/0": [{"variables": {"state": 
"off"}}], "0/sequence/1": [ { @@ -1840,7 +1840,7 @@ async def test_wait_in_parallel(hass: HomeAssistant) -> None: assert "wait" not in result.variables expected_trace = { - "0": [{}], + "0": [{"variables": {"state": "off"}}], "0/parallel/0/sequence/0": [{"variables": {"state": "off"}}], "0/parallel/0/sequence/1": [ { @@ -5277,11 +5277,23 @@ async def test_set_variable( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test setting variables in scripts.""" - alias = "variables step" sequence = cv.SCRIPT_SCHEMA( [ - {"alias": alias, "variables": {"variable": "value"}}, - {"action": "test.script", "data": {"value": "{{ variable }}"}}, + {"alias": "variables", "variables": {"x": 1, "y": 1}}, + { + "alias": "scope", + "sequence": [ + {"variables": {"y": 3, "z": 3}}, + { + "action": "test.script", + "data": {"value": "x={{ x }}, y={{ y }}, z={{ z }}"}, + }, + ], + }, + { + "action": "test.script", + "data": {"value": "x={{ x }}, y={{ y }}, z={{ z }}"}, + }, ] ) script_obj = script.Script(hass, sequence, "test script", "test_domain") @@ -5291,18 +5303,36 @@ async def test_set_variable( await script_obj.async_run(context=Context()) await hass.async_block_till_done() - assert mock_calls[0].data["value"] == "value" - assert f"Executing step {alias}" in caplog.text + assert len(mock_calls) == 2 + assert mock_calls[0].data["value"] == "x=1, y=3, z=3" + assert mock_calls[1].data["value"] == "x=1, y=3, z=3" + + assert "Executing step variables" in caplog.text expected_trace = { - "0": [{"variables": {"variable": "value"}}], - "1": [ + "0": [{"variables": {"x": 1, "y": 1}}], + "1": [{"variables": {"y": 3, "z": 3}}], + "1/sequence/0": [{"variables": {"y": 3, "z": 3}}], + "1/sequence/1": [ { "result": { "params": { "domain": "test", "service": "script", - "service_data": {"value": "value"}, + "service_data": {"value": "x=1, y=3, z=3"}, + "target": {}, + }, + "running_script": False, + }, + } + ], + "2": [ + { + "result": { + "params": { + "domain": "test", + "service": "script", + "service_data": {"value": "x=1, y=3, z=3"}, "target": {}, }, "running_script": False, @@ -5899,7 +5929,9 @@ async def test_stop_action_nested_response_variables( "variables": {"var": var, "output": {"value": "Testing 123"}}, } ], - "1": [{"result": {"choice": choice}}], + "1": [ + {"result": {"choice": choice}, "variables": {"output": {"value": response}}} + ], "1/if": [{"result": {"result": if_result}}], "1/if/condition/0": [{"result": {"result": var == 1, "entities": []}}], f"1/{choice}/0": [{"variables": {"output": {"value": response}}}], diff --git a/tests/helpers/test_service.py b/tests/helpers/test_service.py index 142f7a23f81..70ab20e87fa 100644 --- a/tests/helpers/test_service.py +++ b/tests/helpers/test_service.py @@ -961,7 +961,7 @@ async def test_async_get_all_descriptions_dot_keys(hass: HomeAssistant) -> None: side_effect=service._load_services_files, ) as proxy_load_services_files, patch( - "homeassistant.util.yaml.loader.load_yaml", + "annotatedyaml.loader.load_yaml", side_effect=load_yaml, ) as mock_load_yaml, ): @@ -1033,7 +1033,7 @@ async def test_async_get_all_descriptions_filter(hass: HomeAssistant) -> None: side_effect=service._load_services_files, ) as proxy_load_services_files, patch( - "homeassistant.util.yaml.loader.load_yaml", + "annotatedyaml.loader.load_yaml", side_effect=load_yaml, ) as mock_load_yaml, ): diff --git a/tests/helpers/test_template.py b/tests/helpers/test_template.py index bdf400ce357..89d1c307fd7 100644 --- a/tests/helpers/test_template.py +++ 
b/tests/helpers/test_template.py @@ -6790,6 +6790,184 @@ def test_flatten(hass: HomeAssistant) -> None: template.Template("{{ flatten() }}", hass).async_render() +def test_intersect(hass: HomeAssistant) -> None: + """Test the intersect function and filter.""" + assert list( + template.Template( + "{{ intersect([1, 2, 5, 3, 4, 10], [1, 2, 3, 4, 5, 11, 99]) }}", hass + ).async_render() + ) == unordered([1, 2, 3, 4, 5]) + + assert list( + template.Template( + "{{ [1, 2, 5, 3, 4, 10] | intersect([1, 2, 3, 4, 5, 11, 99]) }}", hass + ).async_render() + ) == unordered([1, 2, 3, 4, 5]) + + assert list( + template.Template( + "{{ intersect(['a', 'b', 'c'], ['b', 'c', 'd']) }}", hass + ).async_render() + ) == unordered(["b", "c"]) + + assert list( + template.Template( + "{{ ['a', 'b', 'c'] | intersect(['b', 'c', 'd']) }}", hass + ).async_render() + ) == unordered(["b", "c"]) + + assert ( + template.Template("{{ intersect([], [1, 2, 3]) }}", hass).async_render() == [] + ) + + assert ( + template.Template("{{ [] | intersect([1, 2, 3]) }}", hass).async_render() == [] + ) + + with pytest.raises(TemplateError, match="intersect expected a list, got str"): + template.Template("{{ 'string' | intersect([1, 2, 3]) }}", hass).async_render() + + with pytest.raises(TemplateError, match="intersect expected a list, got str"): + template.Template("{{ [1, 2, 3] | intersect('string') }}", hass).async_render() + + +def test_difference(hass: HomeAssistant) -> None: + """Test the difference function and filter.""" + assert list( + template.Template( + "{{ difference([1, 2, 5, 3, 4, 10], [1, 2, 3, 4, 5, 11, 99]) }}", hass + ).async_render() + ) == [10] + + assert list( + template.Template( + "{{ [1, 2, 5, 3, 4, 10] | difference([1, 2, 3, 4, 5, 11, 99]) }}", hass + ).async_render() + ) == [10] + + assert list( + template.Template( + "{{ difference(['a', 'b', 'c'], ['b', 'c', 'd']) }}", hass + ).async_render() + ) == ["a"] + + assert list( + template.Template( + "{{ ['a', 'b', 'c'] | difference(['b', 'c', 'd']) }}", hass + ).async_render() + ) == ["a"] + + assert ( + template.Template("{{ difference([], [1, 2, 3]) }}", hass).async_render() == [] + ) + + assert ( + template.Template("{{ [] | difference([1, 2, 3]) }}", hass).async_render() == [] + ) + + with pytest.raises(TemplateError, match="difference expected a list, got str"): + template.Template("{{ 'string' | difference([1, 2, 3]) }}", hass).async_render() + + with pytest.raises(TemplateError, match="difference expected a list, got str"): + template.Template("{{ [1, 2, 3] | difference('string') }}", hass).async_render() + + +def test_union(hass: HomeAssistant) -> None: + """Test the union function and filter.""" + assert list( + template.Template( + "{{ union([1, 2, 5, 3, 4, 10], [1, 2, 3, 4, 5, 11, 99]) }}", hass + ).async_render() + ) == unordered([1, 2, 3, 4, 5, 10, 11, 99]) + + assert list( + template.Template( + "{{ [1, 2, 5, 3, 4, 10] | union([1, 2, 3, 4, 5, 11, 99]) }}", hass + ).async_render() + ) == unordered([1, 2, 3, 4, 5, 10, 11, 99]) + + assert list( + template.Template( + "{{ union(['a', 'b', 'c'], ['b', 'c', 'd']) }}", hass + ).async_render() + ) == unordered(["a", "b", "c", "d"]) + + assert list( + template.Template( + "{{ ['a', 'b', 'c'] | union(['b', 'c', 'd']) }}", hass + ).async_render() + ) == unordered(["a", "b", "c", "d"]) + + assert list( + template.Template("{{ union([], [1, 2, 3]) }}", hass).async_render() + ) == unordered([1, 2, 3]) + + assert list( + template.Template("{{ [] | union([1, 2, 3]) }}", hass).async_render() + ) == unordered([1, 
2, 3]) + + with pytest.raises(TemplateError, match="union expected a list, got str"): + template.Template("{{ 'string' | union([1, 2, 3]) }}", hass).async_render() + + with pytest.raises(TemplateError, match="union expected a list, got str"): + template.Template("{{ [1, 2, 3] | union('string') }}", hass).async_render() + + +def test_symmetric_difference(hass: HomeAssistant) -> None: + """Test the symmetric_difference function and filter.""" + assert list( + template.Template( + "{{ symmetric_difference([1, 2, 5, 3, 4, 10], [1, 2, 3, 4, 5, 11, 99]) }}", + hass, + ).async_render() + ) == unordered([10, 11, 99]) + + assert list( + template.Template( + "{{ [1, 2, 5, 3, 4, 10] | symmetric_difference([1, 2, 3, 4, 5, 11, 99]) }}", + hass, + ).async_render() + ) == unordered([10, 11, 99]) + + assert list( + template.Template( + "{{ symmetric_difference(['a', 'b', 'c'], ['b', 'c', 'd']) }}", hass + ).async_render() + ) == unordered(["a", "d"]) + + assert list( + template.Template( + "{{ ['a', 'b', 'c'] | symmetric_difference(['b', 'c', 'd']) }}", hass + ).async_render() + ) == unordered(["a", "d"]) + + assert list( + template.Template( + "{{ symmetric_difference([], [1, 2, 3]) }}", hass + ).async_render() + ) == unordered([1, 2, 3]) + + assert list( + template.Template( + "{{ [] | symmetric_difference([1, 2, 3]) }}", hass + ).async_render() + ) == unordered([1, 2, 3]) + + with pytest.raises( + TemplateError, match="symmetric_difference expected a list, got str" + ): + template.Template( + "{{ 'string' | symmetric_difference([1, 2, 3]) }}", hass + ).async_render() + + with pytest.raises( + TemplateError, match="symmetric_difference expected a list, got str" + ): + template.Template( + "{{ [1, 2, 3] | symmetric_difference('string') }}", hass + ).async_render() + + def test_md5(hass: HomeAssistant) -> None: """Test the md5 function and filter.""" assert ( @@ -6840,3 +7018,39 @@ def test_sha512(hass: HomeAssistant) -> None: template.Template("{{ 'Home Assistant' | sha512 }}", hass).async_render() == "9e3c2cdd1fbab0037378d37e1baf8a3a4bf92c54b56ad1d459deee30ccbb2acbebd7a3614552ea08992ad27dedeb7b4c5473525ba90cb73dbe8b9ec5f69295bb" ) + + +def test_combine(hass: HomeAssistant) -> None: + """Test combine filter and function.""" + assert template.Template( + "{{ {'a': 1, 'b': 2} | combine({'b': 3, 'c': 4}) }}", hass + ).async_render() == {"a": 1, "b": 3, "c": 4} + + assert template.Template( + "{{ combine({'a': 1, 'b': 2}, {'b': 3, 'c': 4}) }}", hass + ).async_render() == {"a": 1, "b": 3, "c": 4} + + assert template.Template( + "{{ combine({'a': 1, 'b': {'x': 1}}, {'b': {'y': 2}, 'c': 4}, recursive=True) }}", + hass, + ).async_render() == {"a": 1, "b": {"x": 1, "y": 2}, "c": 4} + + # Test that recursive=False does not merge nested dictionaries + assert template.Template( + "{{ combine({'a': 1, 'b': {'x': 1}}, {'b': {'y': 2}, 'c': 4}, recursive=False) }}", + hass, + ).async_render() == {"a": 1, "b": {"y": 2}, "c": 4} + + # Test that None values are handled correctly in recursive merge + assert template.Template( + "{{ combine({'a': 1, 'b': none}, {'b': {'y': 2}, 'c': 4}, recursive=True) }}", + hass, + ).async_render() == {"a": 1, "b": {"y": 2}, "c": 4} + + with pytest.raises( + TemplateError, match="combine expected at least 1 argument, got 0" + ): + template.Template("{{ combine() }}", hass).async_render() + + with pytest.raises(TemplateError, match="combine expected a dict, got str"): + template.Template("{{ {'a': 1} | combine('not a dict') }}", hass).async_render() diff --git 
a/tests/snapshots/test_config.ambr b/tests/snapshots/test_config.ambr index 6fcbce7d8d6..7531bf5a663 100644 --- a/tests/snapshots/test_config.ambr +++ b/tests/snapshots/test_config.ambr @@ -434,7 +434,7 @@ # name: test_yaml_error[basic] ''' mapping values are not allowed here - in "configuration.yaml", line 4, column 14 + in "/fixtures/core/config/yaml_errors/basic/configuration.yaml", line 4, column 14 ''' # --- # name: test_yaml_error[basic].1 @@ -448,7 +448,7 @@ # name: test_yaml_error[basic_include] ''' mapping values are not allowed here - in "integrations/iot_domain.yaml", line 3, column 12 + in "/fixtures/core/config/yaml_errors/basic_include/integrations/iot_domain.yaml", line 3, column 12 ''' # --- # name: test_yaml_error[basic_include].1 @@ -462,7 +462,7 @@ # name: test_yaml_error[include_dir_list] ''' mapping values are not allowed here - in "iot_domain/iot_domain_1.yaml", line 3, column 10 + in "/fixtures/core/config/yaml_errors/include_dir_list/iot_domain/iot_domain_1.yaml", line 3, column 10 ''' # --- # name: test_yaml_error[include_dir_list].1 @@ -476,7 +476,7 @@ # name: test_yaml_error[include_dir_merge_list] ''' mapping values are not allowed here - in "iot_domain/iot_domain_1.yaml", line 3, column 12 + in "/fixtures/core/config/yaml_errors/include_dir_merge_list/iot_domain/iot_domain_1.yaml", line 3, column 12 ''' # --- # name: test_yaml_error[include_dir_merge_list].1 @@ -490,7 +490,7 @@ # name: test_yaml_error[packages_include_dir_named] ''' mapping values are not allowed here - in "integrations/adr_0007_1.yaml", line 4, column 9 + in "/fixtures/core/config/yaml_errors/packages_include_dir_named/integrations/adr_0007_1.yaml", line 4, column 9 ''' # --- # name: test_yaml_error[packages_include_dir_named].1 diff --git a/tests/test_backup_restore.py b/tests/test_backup_restore.py index 4c6bc930667..7efe25c8428 100644 --- a/tests/test_backup_restore.py +++ b/tests/test_backup_restore.py @@ -15,7 +15,7 @@ from .common import get_test_config_dir @pytest.fixture(autouse=True) -def remove_restore_result_file() -> Generator[None, Any, Any]: +def remove_restore_result_file() -> Generator[None]: """Remove the restore result file.""" yield Path(get_test_config_dir(".HA_RESTORE_RESULT")).unlink(missing_ok=True) diff --git a/tests/test_block_async_io.py b/tests/test_block_async_io.py index f42fbb9f4ef..337e5500718 100644 --- a/tests/test_block_async_io.py +++ b/tests/test_block_async_io.py @@ -459,3 +459,14 @@ async def test_open_calls_ignored_in_tests(caplog: pytest.LogCaptureFixture) -> pass assert "Detected blocking call to open with args" not in caplog.text + + +async def test_protect_loop_set_default_verify_paths( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test SSLContext.set_default_verify_paths calls in the loop are logged.""" + with patch.object(block_async_io, "_IN_TESTS", False): + block_async_io.enable() + context = ssl.create_default_context() + context.set_default_verify_paths() + assert "Detected blocking call to set_default_verify_paths" in caplog.text diff --git a/tests/test_bootstrap.py b/tests/test_bootstrap.py index e89d038f8ce..ca75dc51c56 100644 --- a/tests/test_bootstrap.py +++ b/tests/test_bootstrap.py @@ -252,8 +252,8 @@ async def test_setup_after_deps_all_present(hass: HomeAssistant) -> None: @pytest.mark.parametrize("load_registries", [False]) -async def test_setup_after_deps_in_stage_1_ignored(hass: HomeAssistant) -> None: - """Test after_dependencies are ignored in stage 1.""" +async def test_setup_after_deps_in_stage_1(hass: 
HomeAssistant) -> None: + """Test after_dependencies are promoted in stage 1.""" # This test relies on this assert "cloud" in bootstrap.STAGE_1_INTEGRATIONS order = [] @@ -295,7 +295,7 @@ async def test_setup_after_deps_in_stage_1_ignored(hass: HomeAssistant) -> None: assert "normal_integration" in hass.config.components assert "cloud" in hass.config.components - assert order == ["cloud", "an_after_dep", "normal_integration"] + assert order == ["an_after_dep", "normal_integration", "cloud"] @pytest.mark.parametrize("load_registries", [False]) @@ -304,7 +304,7 @@ async def test_setup_after_deps_manifests_are_loaded_even_if_not_setup( ) -> None: """Ensure we preload manifests for after deps even if they are not setup. - Its important that we preload the after dep manifests even if they are not setup + It's important that we preload the after dep manifests even if they are not setup since we will always have to check their requirements since any integration that lists an after dep may import it and we have to ensure requirements are up to date before the after dep can be imported. @@ -371,7 +371,7 @@ async def test_setup_after_deps_manifests_are_loaded_even_if_not_setup( assert "an_after_dep" not in hass.config.components assert "an_after_dep_of_after_dep" not in hass.config.components assert "an_after_dep_of_after_dep_of_after_dep" not in hass.config.components - assert order == ["cloud", "normal_integration"] + assert order == ["normal_integration", "cloud"] assert loader.async_get_loaded_integration(hass, "an_after_dep") is not None assert ( loader.async_get_loaded_integration(hass, "an_after_dep_of_after_dep") @@ -456,9 +456,9 @@ async def test_setup_frontend_before_recorder(hass: HomeAssistant) -> None: assert order == [ "http", + "an_after_dep", "frontend", "recorder", - "an_after_dep", "normal_integration", ] @@ -572,7 +572,7 @@ async def test_setup_after_deps_not_present(hass: HomeAssistant) -> None: MockModule( domain="second_dep", async_setup=gen_domain_setup("second_dep"), - partial_manifest={"after_dependencies": ["first_dep"]}, + partial_manifest={"after_dependencies": ["first_dep", "root"]}, ), ) @@ -1169,6 +1169,7 @@ async def test_bootstrap_is_cancellation_safe( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test cancellation during async_setup_component does not cancel bootstrap.""" + mock_integration(hass, MockModule(domain="cancel_integration")) with patch.object( bootstrap, "async_setup_component", side_effect=asyncio.CancelledError ): @@ -1185,6 +1186,18 @@ async def test_bootstrap_empty_integrations(hass: HomeAssistant) -> None: await hass.async_block_till_done() +@pytest.mark.parametrize("load_registries", [False]) +async def test_bootstrap_log_already_setup_stage( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test logging when all integrations in a stage were already setup.""" + with patch.object(bootstrap, "STAGE_1_INTEGRATIONS", {"frontend"}): + await bootstrap._async_set_up_integrations(hass, {}) + await hass.async_block_till_done() + + assert "Already set up stage 1: {'frontend'}" in caplog.text + + @pytest.fixture(name="mock_mqtt_config_flow") def mock_mqtt_config_flow_fixture() -> Generator[None]: """Mock MQTT config flow.""" @@ -1533,41 +1546,70 @@ def test_should_rollover_is_always_false() -> None: async def test_no_base_platforms_loaded_before_recorder(hass: HomeAssistant) -> None: """Verify stage 0 not load base platforms before recorder. 
- If a stage 0 integration has a base platform in its dependencies and - it loads before the recorder, it may load integrations that expect - the recorder to be loaded. We need to ensure that no stage 0 integration - has a base platform in its dependencies that loads before the recorder. + If a stage 0 integration implements base platforms or has a base + platform in its dependencies and it loads before the recorder, + because of platform-based YAML schema, it may inadvertently + load integrations that expect the recorder to already be loaded. + We need to ensure that doesn't happen. """ + IGNORE_BASE_PLATFORM_FILES = { + # config/scene.py is not a platform + "config": {"scene.py"}, + # websocket_api/sensor.py is using the platform YAML schema + # we must not migrate it to an integration key until + # we remove the platform YAML schema support for sensors + "websocket_api": {"sensor.py"}, + } + integrations_before_recorder: set[str] = set() for _, integrations, _ in bootstrap.STAGE_0_INTEGRATIONS: integrations_before_recorder |= integrations if "recorder" in integrations: break + else: + pytest.fail("recorder not in stage 0") - integrations_or_execs = await loader.async_get_integrations( + integrations_or_excs = await loader.async_get_integrations( hass, integrations_before_recorder ) - integrations: list[Integration] = [] - resolve_deps_tasks: list[asyncio.Task[bool]] = [] - for integration in integrations_or_execs.values(): - assert not isinstance(integrations_or_execs, Exception) - integrations.append(integration) - resolve_deps_tasks.append(integration.resolve_dependencies()) + integrations: dict[str, Integration] = {} + for domain, integration in integrations_or_excs.items(): + assert not isinstance(integrations_or_excs, Exception) + integrations[domain] = integration + + integrations_all_dependencies = ( + await loader.resolve_integrations_after_dependencies( + hass, integrations.values(), ignore_exceptions=True + ) + ) + all_integrations = integrations.copy() + all_integrations.update( + (domain, loader.async_get_loaded_integration(hass, domain)) + for domains in integrations_all_dependencies.values() + for domain in domains + ) + + problems: dict[str, set[str]] = {} + for domain in integrations: + domain_with_base_platforms_deps = ( + integrations_all_dependencies[domain] & BASE_PLATFORMS + ) + if domain_with_base_platforms_deps: + problems[domain] = domain_with_base_platforms_deps + assert not problems, ( + f"Integrations that are setup before recorder have base platforms in their dependencies: {problems}" + ) - await asyncio.gather(*resolve_deps_tasks) base_platform_py_files = {f"{base_platform}.py" for base_platform in BASE_PLATFORMS} - for integration in integrations: - domain_with_base_platforms_deps = BASE_PLATFORMS.intersection( - integration.all_dependencies - ) - assert not domain_with_base_platforms_deps, ( - f"{integration.domain} has base platforms in dependencies: " - f"{domain_with_base_platforms_deps}" - ) - integration_top_level_files = base_platform_py_files.intersection( - integration._top_level_files - ) - assert not integration_top_level_files, ( - f"{integration.domain} has base platform files in top level files: " - f"{integration_top_level_files}" + + for domain, integration in all_integrations.items(): + integration_base_platforms_files = ( + integration._top_level_files & base_platform_py_files ) + if ignore := IGNORE_BASE_PLATFORM_FILES.get(domain): + integration_base_platforms_files -= ignore + if integration_base_platforms_files: + problems[domain] = 
integration_base_platforms_files + assert not problems, ( + f"Integrations that are setup before recorder implement base platforms: {problems}" + ) diff --git a/tests/test_config_entries.py b/tests/test_config_entries.py index d19c3b38650..e3b80ecc03f 100644 --- a/tests/test_config_entries.py +++ b/tests/test_config_entries.py @@ -1421,83 +1421,6 @@ async def test_discovery_notification( assert "config_entry_discovery" not in notifications -async def test_reauth_notification(hass: HomeAssistant) -> None: - """Test that we create/dismiss a notification when source is reauth.""" - mock_integration(hass, MockModule("test")) - mock_platform(hass, "test.config_flow", None) - - entry = MockConfigEntry(title="test_title", domain="test") - entry.add_to_hass(hass) - - class TestFlow(config_entries.ConfigFlow): - """Test flow.""" - - VERSION = 5 - - async def async_step_user(self, user_input): - """Test user step.""" - return self.async_show_form(step_id="user_confirm") - - async def async_step_user_confirm(self, user_input): - """Test user confirm step.""" - return self.async_show_form(step_id="user_confirm") - - async def async_step_reauth(self, user_input): - """Test reauth step.""" - return self.async_show_form(step_id="reauth_confirm") - - async def async_step_reauth_confirm(self, user_input): - """Test reauth confirm step.""" - return self.async_abort(reason="test") - - with mock_config_flow("test", TestFlow): - # Start user flow to assert that reconfigure notification doesn't fire - await hass.config_entries.flow.async_init( - "test", context={"source": config_entries.SOURCE_USER} - ) - - await hass.async_block_till_done() - notifications = async_get_persistent_notifications(hass) - assert "config_entry_reconfigure" not in notifications - - # Start first reauth flow to assert that reconfigure notification fires - flow1 = await hass.config_entries.flow.async_init( - "test", - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - ) - - await hass.async_block_till_done() - notifications = async_get_persistent_notifications(hass) - assert "config_entry_reconfigure" in notifications - - # Start a second reauth flow so we can finish the first and assert that - # the reconfigure notification persists until the second one is complete - flow2 = await hass.config_entries.flow.async_init( - "test", - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - ) - - flow1 = await hass.config_entries.flow.async_configure(flow1["flow_id"], {}) - assert flow1["type"] == data_entry_flow.FlowResultType.ABORT - - await hass.async_block_till_done() - notifications = async_get_persistent_notifications(hass) - assert "config_entry_reconfigure" in notifications - - flow2 = await hass.config_entries.flow.async_configure(flow2["flow_id"], {}) - assert flow2["type"] == data_entry_flow.FlowResultType.ABORT - - await hass.async_block_till_done() - notifications = async_get_persistent_notifications(hass) - assert "config_entry_reconfigure" not in notifications - - async def test_reauth_issue( hass: HomeAssistant, manager: config_entries.ConfigEntries, @@ -3566,37 +3489,97 @@ async def test_unique_id_not_update_existing_entry( assert len(async_reload.mock_calls) == 0 +ABORT_IN_PROGRESS = { + "type": data_entry_flow.FlowResultType.ABORT, + "reason": "already_in_progress", +} + + +@pytest.mark.parametrize( + ("existing_flow_source", "expected_result"), + # Test all sources except SOURCE_IGNORE + [ + (config_entries.SOURCE_BLUETOOTH, ABORT_IN_PROGRESS), + 
(config_entries.SOURCE_DHCP, ABORT_IN_PROGRESS), + (config_entries.SOURCE_DISCOVERY, ABORT_IN_PROGRESS), + (config_entries.SOURCE_HARDWARE, ABORT_IN_PROGRESS), + (config_entries.SOURCE_HASSIO, ABORT_IN_PROGRESS), + (config_entries.SOURCE_HOMEKIT, ABORT_IN_PROGRESS), + (config_entries.SOURCE_IMPORT, ABORT_IN_PROGRESS), + (config_entries.SOURCE_INTEGRATION_DISCOVERY, ABORT_IN_PROGRESS), + (config_entries.SOURCE_MQTT, ABORT_IN_PROGRESS), + (config_entries.SOURCE_REAUTH, {"type": data_entry_flow.FlowResultType.FORM}), + (config_entries.SOURCE_RECONFIGURE, ABORT_IN_PROGRESS), + (config_entries.SOURCE_SSDP, ABORT_IN_PROGRESS), + (config_entries.SOURCE_SYSTEM, ABORT_IN_PROGRESS), + (config_entries.SOURCE_USB, ABORT_IN_PROGRESS), + (config_entries.SOURCE_USER, ABORT_IN_PROGRESS), + (config_entries.SOURCE_ZEROCONF, ABORT_IN_PROGRESS), + ], +) async def test_unique_id_in_progress( - hass: HomeAssistant, manager: config_entries.ConfigEntries + hass: HomeAssistant, + manager: config_entries.ConfigEntries, + existing_flow_source: str, + expected_result: dict, ) -> None: """Test that we abort if there is already a flow in progress with same unique id.""" mock_integration(hass, MockModule("comp")) mock_platform(hass, "comp.config_flow", None) + entry = MockConfigEntry(domain="comp") + entry.add_to_hass(hass) class TestFlow(config_entries.ConfigFlow): """Test flow.""" VERSION = 1 + async def _async_step_discovery_without_unique_id(self): + """Handle a flow initialized by discovery.""" + return await self._async_step() + + async def async_step_hardware(self, user_input=None): + """Test hardware step.""" + return await self._async_step() + + async def async_step_import(self, user_input=None): + """Test import step.""" + return await self._async_step() + + async def async_step_reauth(self, user_input=None): + """Test reauth step.""" + return await self._async_step() + + async def async_step_reconfigure(self, user_input=None): + """Test reconfigure step.""" + return await self._async_step() + + async def async_step_system(self, user_input=None): + """Test system step.""" + return await self._async_step() + async def async_step_user(self, user_input=None): """Test user step.""" + return await self._async_step() + + async def _async_step(self, user_input=None): + """Test step.""" await self.async_set_unique_id("mock-unique-id") return self.async_show_form(step_id="discovery") with mock_config_flow("comp", TestFlow): # Create one to be in progress result = await manager.flow.async_init( - "comp", context={"source": config_entries.SOURCE_USER} + "comp", context={"source": existing_flow_source, "entry_id": entry.entry_id} ) assert result["type"] == data_entry_flow.FlowResultType.FORM - # Will be canceled result2 = await manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_USER} ) - assert result2["type"] == data_entry_flow.FlowResultType.ABORT - assert result2["reason"] == "already_in_progress" + for k, v in expected_result.items(): + assert result2[k] == v async def test_finish_flow_aborts_progress( @@ -6506,7 +6489,7 @@ async def test_update_subentry_and_abort( class SubentryFlowHandler(config_entries.ConfigSubentryFlow): async def async_step_reconfigure(self, user_input=None): return self.async_update_and_abort( - self._get_reconfigure_entry(), + self._get_entry(), self._get_reconfigure_subentry(), **kwargs, ) @@ -8098,10 +8081,10 @@ async def test_get_reconfigure_entry( assert result["reason"] == "Source is user, expected reconfigure: -" -async def test_subentry_get_reconfigure_entry( 
+async def test_subentry_get_entry( hass: HomeAssistant, manager: config_entries.ConfigEntries ) -> None: - """Test subentry _get_reconfigure_entry and _get_reconfigure_subentry behavior.""" + """Test subentry _get_entry and _get_reconfigure_subentry behavior.""" subentry_id = "mock_subentry_id" entry = MockConfigEntry( data={}, @@ -8138,13 +8121,13 @@ async def test_subentry_get_reconfigure_entry( async def _async_step_confirm(self): """Confirm input.""" try: - entry = self._get_reconfigure_entry() + entry = self._get_entry() except ValueError as err: reason = str(err) else: reason = f"Found entry {entry.title}" try: - entry_id = self._reconfigure_entry_id + entry_id = self._entry_id except ValueError: reason = f"{reason}: -" else: @@ -8173,7 +8156,7 @@ async def test_subentry_get_reconfigure_entry( ) -> dict[str, type[config_entries.ConfigSubentryFlow]]: return {"test": TestFlow.SubentryFlowHandler} - # A reconfigure flow finds the config entry + # A reconfigure flow finds the config entry and subentry with mock_config_flow("test", TestFlow): result = await entry.start_subentry_reconfigure_flow(hass, "test", subentry_id) assert ( @@ -8195,14 +8178,14 @@ async def test_subentry_get_reconfigure_entry( == "Found entry entry_title: mock_entry_id/Subentry not found: 01JRemoved" ) - # A user flow does not have access to the config entry or subentry + # A user flow finds the config entry but not the subentry with mock_config_flow("test", TestFlow): result = await manager.subentries.async_init( (entry.entry_id, "test"), context={"source": config_entries.SOURCE_USER} ) assert ( result["reason"] - == "Source is user, expected reconfigure: -/Source is user, expected reconfigure: -" + == "Found entry entry_title: mock_entry_id/Source is user, expected reconfigure: -" ) diff --git a/tests/test_data_entry_flow.py b/tests/test_data_entry_flow.py index 74a55cb4989..86ba5257001 100644 --- a/tests/test_data_entry_flow.py +++ b/tests/test_data_entry_flow.py @@ -133,6 +133,61 @@ async def test_show_form(manager: MockFlowManager) -> None: assert form["errors"] == {"username": "Should be unique."} +async def test_form_shows_with_added_suggested_values(manager: MockFlowManager) -> None: + """Test that we can show a form with suggested values.""" + schema = vol.Schema( + { + vol.Required("username"): str, + vol.Required("password"): str, + vol.Required("section_1"): data_entry_flow.section( + vol.Schema( + { + vol.Optional("full_name"): str, + } + ), + {"collapsed": False}, + ), + } + ) + + @manager.mock_reg_handler("test") + class TestFlow(data_entry_flow.FlowHandler): + async def async_step_init(self, user_input=None): + data_schema = self.add_suggested_values_to_schema( + schema, + { + "username": "doej", + "password": "verySecret1", + "section_1": {"full_name": "John Doe"}, + }, + ) + return self.async_show_form( + step_id="init", + data_schema=data_schema, + ) + + form = await manager.async_init("test") + assert form["type"] == data_entry_flow.FlowResultType.FORM + assert form["data_schema"].schema == schema.schema + markers = list(form["data_schema"].schema) + assert len(markers) == 3 + assert markers[0] == "username" + assert markers[0].description == {"suggested_value": "doej"} + assert markers[1] == "password" + assert markers[1].description == {"suggested_value": "verySecret1"} + assert markers[2] == "section_1" + section_validator = form["data_schema"].schema["section_1"] + assert isinstance(section_validator, data_entry_flow.section) + # The section class was not replaced + assert section_validator 
is schema.schema["section_1"] + # The section schema was not replaced + assert section_validator.schema is schema.schema["section_1"].schema + section_markers = list(section_validator.schema.schema) + assert len(section_markers) == 1 + assert section_markers[0] == "full_name" + assert section_markers[0].description == {"suggested_value": "John Doe"} + + async def test_abort_removes_instance(manager: MockFlowManager) -> None: """Test that abort removes the flow from progress.""" diff --git a/tests/test_loader.py b/tests/test_loader.py index 548091a3503..0b83ddee3ea 100644 --- a/tests/test_loader.py +++ b/tests/test_loader.py @@ -27,33 +27,42 @@ async def test_circular_component_dependencies(hass: HomeAssistant) -> None: mock_integration(hass, MockModule("mod2", dependencies=["mod1"])) mock_integration(hass, MockModule("mod3", dependencies=["mod1"])) mod_4 = mock_integration(hass, MockModule("mod4", dependencies=["mod2", "mod3"])) + all_domains = {"mod1", "mod2", "mod3", "mod4"} - deps = await loader._async_component_dependencies(hass, mod_4) - assert deps == {"mod1", "mod2", "mod3", "mod4"} + deps = await loader._do_resolve_dependencies(mod_4, cache={}) + assert deps == {"mod1", "mod2", "mod3"} # Create a circular dependency mock_integration(hass, MockModule("mod1", dependencies=["mod4"])) with pytest.raises(loader.CircularDependency): - await loader._async_component_dependencies(hass, mod_4) + await loader._do_resolve_dependencies(mod_4, cache={}) # Create a different circular dependency mock_integration(hass, MockModule("mod1", dependencies=["mod3"])) with pytest.raises(loader.CircularDependency): - await loader._async_component_dependencies(hass, mod_4) + await loader._do_resolve_dependencies(mod_4, cache={}) # Create a circular after_dependency mock_integration( hass, MockModule("mod1", partial_manifest={"after_dependencies": ["mod4"]}) ) with pytest.raises(loader.CircularDependency): - await loader._async_component_dependencies(hass, mod_4) + await loader._do_resolve_dependencies( + mod_4, + cache={}, + possible_after_dependencies=all_domains, + ) # Create a different circular after_dependency mock_integration( hass, MockModule("mod1", partial_manifest={"after_dependencies": ["mod3"]}) ) with pytest.raises(loader.CircularDependency): - await loader._async_component_dependencies(hass, mod_4) + await loader._do_resolve_dependencies( + mod_4, + cache={}, + possible_after_dependencies=all_domains, + ) # Create a circular after_dependency without a hard dependency mock_integration( @@ -62,29 +71,48 @@ async def test_circular_component_dependencies(hass: HomeAssistant) -> None: mod_4 = mock_integration( hass, MockModule("mod4", partial_manifest={"after_dependencies": ["mod2"]}) ) - # this currently doesn't raise, but it should. Will be improved in a follow-up. 
- await loader._async_component_dependencies(hass, mod_4) + with pytest.raises(loader.CircularDependency): + await loader._do_resolve_dependencies( + mod_4, + cache={}, + possible_after_dependencies=all_domains, + ) + + result = await loader.resolve_integrations_after_dependencies(hass, (mod_4,)) + assert result == {} + result = await loader.resolve_integrations_after_dependencies( + hass, (mod_4,), ignore_exceptions=True + ) + assert result["mod4"] == {"mod4", "mod2", "mod1"} async def test_nonexistent_component_dependencies(hass: HomeAssistant) -> None: """Test if we can detect nonexistent dependencies of components.""" mod_1 = mock_integration(hass, MockModule("mod1", dependencies=["nonexistent"])) - with pytest.raises(loader.IntegrationNotFound): - await loader._async_component_dependencies(hass, mod_1) mod_2 = mock_integration(hass, MockModule("mod2", dependencies=["mod1"])) - assert not await mod_2.resolve_dependencies() + assert await mod_2.resolve_dependencies() is None assert mod_2.all_dependencies_resolved - with pytest.raises(RuntimeError): + with pytest.raises(loader.IntegrationNotFound): mod_2.all_dependencies # noqa: B018 - # this currently is not resolved, because intermediate results are not cached - # this will be improved in a follow-up - assert not mod_1.all_dependencies_resolved - assert not await mod_1.resolve_dependencies() - with pytest.raises(RuntimeError): + assert mod_1.all_dependencies_resolved + assert await mod_1.resolve_dependencies() is None + with pytest.raises(loader.IntegrationNotFound): mod_1.all_dependencies # noqa: B018 + result = await loader.resolve_integrations_dependencies(hass, (mod_2, mod_1)) + assert result == {} + + mod_1 = mock_integration( + hass, + MockModule("mod1", partial_manifest={"after_dependencies": ["non.existent"]}), + ) + mod_2 = mock_integration(hass, MockModule("mod2", dependencies=["mod1"])) + + result = await loader.resolve_integrations_after_dependencies(hass, (mod_2, mod_1)) + assert result == {} + def test_component_loader(hass: HomeAssistant) -> None: """Test loading components.""" diff --git a/tests/util/test_logging.py b/tests/util/test_logging.py index e5b85f35693..d213a68d7f2 100644 --- a/tests/util/test_logging.py +++ b/tests/util/test_logging.py @@ -6,6 +6,7 @@ import logging import queue from unittest.mock import patch +from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.core import ( @@ -17,6 +18,13 @@ from homeassistant.core import ( from homeassistant.util import logging as logging_util +async def empty_log_queue() -> None: + """Empty the log queue.""" + log_queue: queue.SimpleQueue = logging.root.handlers[0].queue + while not log_queue.empty(): + await asyncio.sleep(0) + + async def test_logging_with_queue_handler() -> None: """Test logging with HomeAssistantQueueHandler.""" @@ -149,3 +157,115 @@ async def test_catch_log_exception_catches_and_logs() -> None: func("failure sync passed") assert saved_args == [("failure sync passed",)] + + +@patch("homeassistant.util.logging.HomeAssistantQueueListener.MAX_LOGS_COUNT", 5) +@pytest.mark.parametrize( + ( + "logger1_count", + "logger1_expected_notices", + "logger2_count", + "logger2_expected_notices", + ), + [(4, 0, 0, 0), (5, 1, 1, 0), (11, 1, 5, 1), (20, 1, 20, 1)], +) +async def test_noisy_loggers( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + logger1_count: int, + logger1_expected_notices: int, + logger2_count: int, + logger2_expected_notices: int, +) -> None: + """Test that noisy loggers all logged as warnings.""" + + 
logging_util.async_activate_log_queue_handler(hass) + logger1 = logging.getLogger("noisy1") + logger2 = logging.getLogger("noisy2.module") + + for _ in range(logger1_count): + logger1.info("This is a log") + + for _ in range(logger2_count): + logger2.info("This is another log") + + await empty_log_queue() + + assert ( + caplog.text.count( + "Module noisy1 is logging too frequently. 5 messages since last count" + ) + == logger1_expected_notices + ) + assert ( + caplog.text.count( + "Module noisy2.module is logging too frequently. 5 messages since last count" + ) + == logger2_expected_notices + ) + + # close the handler so the queue thread stops + logging.root.handlers[0].close() + + +@patch("homeassistant.util.logging.HomeAssistantQueueListener.MAX_LOGS_COUNT", 5) +async def test_noisy_loggers_ignores_lower_than_info( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test that noisy loggers all logged as warnings, except for levels lower than INFO.""" + + logging_util.async_activate_log_queue_handler(hass) + logger = logging.getLogger("noisy_module") + + for _ in range(5): + logger.debug("This is a log") + + await empty_log_queue() + expected_warning = "Module noisy_module is logging too frequently" + assert caplog.text.count(expected_warning) == 0 + + logger.info("This is a log") + logger.info("This is a log") + logger.warning("This is a log") + logger.error("This is a log") + logger.critical("This is a log") + + await empty_log_queue() + assert caplog.text.count(expected_warning) == 1 + + # close the handler so the queue thread stops + logging.root.handlers[0].close() + + +@patch("homeassistant.util.logging.HomeAssistantQueueListener.MAX_LOGS_COUNT", 3) +async def test_noisy_loggers_counters_reset( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + freezer: FrozenDateTimeFactory, +) -> None: + """Test that noisy logger counters reset periodically.""" + + logging_util.async_activate_log_queue_handler(hass) + logger = logging.getLogger("noisy_module") + + expected_warning = "Module noisy_module is logging too frequently" + + # Do multiple iterations to ensure the reset is periodic + for _ in range(logging_util.HomeAssistantQueueListener.MAX_LOGS_COUNT * 2): + logger.info("This is log 0") + await empty_log_queue() + + freezer.tick( + logging_util.HomeAssistantQueueListener.LOG_COUNTS_RESET_INTERVAL + 1 + ) + + logger.info("This is log 1") + await empty_log_queue() + assert caplog.text.count(expected_warning) == 0 + + logger.info("This is log 2") + logger.info("This is log 3") + await empty_log_queue() + assert caplog.text.count(expected_warning) == 1 + # close the handler so the queue thread stops + logging.root.handlers[0].close() diff --git a/tests/util/yaml/test_init.py b/tests/util/yaml/test_init.py index 0346e21044f..dacbd2c1247 100644 --- a/tests/util/yaml/test_init.py +++ b/tests/util/yaml/test_init.py @@ -374,7 +374,7 @@ def test_include_dir_merge_named_recursive(mock_walk: Mock) -> None: } -@patch("homeassistant.util.yaml.loader.open", create=True) +@patch("annotatedyaml.loader.open", create=True) @pytest.mark.usefixtures("try_both_loaders") def test_load_yaml_encoding_error(mock_open: Mock) -> None: """Test raising a UnicodeDecodeError.""" @@ -598,7 +598,7 @@ def test_load_yaml_wrap_oserror( ) -> None: """Test load_yaml wraps OSError in HomeAssistantError.""" with ( - patch("homeassistant.util.yaml.loader.open", side_effect=open_exception), + patch("annotatedyaml.loader.open", side_effect=open_exception), pytest.raises(load_yaml_exception), ): 
yaml_loader.load_yaml("bla")
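
The retargeted patches in the last two hunks (homeassistant.util.yaml.loader to annotatedyaml.loader, and likewise in test_service.py) follow the usual unittest.mock rule of patching a name in the module that actually looks it up; the change suggests, though the diff itself does not state it, that the YAML loading primitives now live in the external annotatedyaml package. A minimal sketch of the pattern, modelled on test_load_yaml_wrap_oserror above with the parametrization removed; the test name below is illustrative and not part of the patch:

from unittest.mock import patch

import pytest

from homeassistant.exceptions import HomeAssistantError
from homeassistant.util.yaml import loader as yaml_loader


def test_load_yaml_oserror_wrapped() -> None:
    """Failure of the patched open call surfaces as HomeAssistantError."""
    # Patch ``open`` where it is looked up (annotatedyaml.loader), not in
    # homeassistant.util.yaml.loader, mirroring the updated tests above.
    with (
        patch("annotatedyaml.loader.open", side_effect=OSError),
        pytest.raises(HomeAssistantError),
    ):
        yaml_loader.load_yaml("bla")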